var/home/core/zuul-output/logs/kubelet.log
Nov 24 08:35:26 crc systemd[1]: Starting Kubernetes Kubelet... Nov 24 08:35:26 crc restorecon[4699]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 08:35:26 crc restorecon[4699]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:26 crc restorecon[4699]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 08:35:26 crc restorecon[4699]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:26 crc restorecon[4699]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 08:35:26 crc restorecon[4699]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 08:35:26 crc restorecon[4699]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:26 crc restorecon[4699]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:26 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 08:35:27 crc 
restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 08:35:27 crc 
restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc 
restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc 
restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 08:35:27 
crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 
08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 
08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 08:35:27 crc 
restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 
08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 
08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc 
restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 08:35:27 crc restorecon[4699]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 08:35:27 crc restorecon[4699]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Nov 24 08:35:28 crc kubenswrapper[4718]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 24 08:35:28 crc kubenswrapper[4718]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Nov 24 08:35:28 crc kubenswrapper[4718]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 24 08:35:28 crc kubenswrapper[4718]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Nov 24 08:35:28 crc kubenswrapper[4718]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Nov 24 08:35:28 crc kubenswrapper[4718]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.319241 4718 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325696 4718 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325725 4718 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325753 4718 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325762 4718 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325772 4718 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325782 4718 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325791 4718 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325799 4718 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325807 4718 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325814 4718 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325822 4718 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325831 4718 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325838 4718 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325846 4718 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325854 4718 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325861 4718 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325870 4718 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325878 4718 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325885 4718 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325893 4718 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325904 4718 
feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325914 4718 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325922 4718 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325930 4718 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325938 4718 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325946 4718 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325954 4718 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325962 4718 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.325969 4718 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326005 4718 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326013 4718 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326021 4718 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326028 4718 feature_gate.go:330] unrecognized feature gate: Example Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326038 4718 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326047 4718 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326058 4718 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326068 4718 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326078 4718 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326090 4718 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326099 4718 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326110 4718 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326117 4718 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326126 4718 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326136 4718 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326144 4718 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326152 4718 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 24 
08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326160 4718 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326169 4718 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326177 4718 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326187 4718 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326196 4718 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326207 4718 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326218 4718 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326226 4718 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326235 4718 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326243 4718 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326251 4718 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326261 4718 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326272 4718 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326283 4718 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326292 4718 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326301 4718 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326309 4718 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326317 4718 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326325 4718 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326332 4718 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326340 4718 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326348 4718 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326356 4718 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326363 4718 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.326371 4718 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 24 08:35:28 
crc kubenswrapper[4718]: I1124 08:35:28.327455 4718 flags.go:64] FLAG: --address="0.0.0.0" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327478 4718 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327492 4718 flags.go:64] FLAG: --anonymous-auth="true" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327509 4718 flags.go:64] FLAG: --application-metrics-count-limit="100" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327527 4718 flags.go:64] FLAG: --authentication-token-webhook="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327537 4718 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327549 4718 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327561 4718 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327571 4718 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327581 4718 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327591 4718 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327600 4718 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327609 4718 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327618 4718 flags.go:64] FLAG: --cgroup-root="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327628 4718 flags.go:64] FLAG: --cgroups-per-qos="true" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327637 4718 flags.go:64] FLAG: --client-ca-file="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327646 4718 flags.go:64] FLAG: --cloud-config="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327654 4718 flags.go:64] FLAG: --cloud-provider="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327663 4718 flags.go:64] FLAG: --cluster-dns="[]" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327674 4718 flags.go:64] FLAG: --cluster-domain="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327683 4718 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327692 4718 flags.go:64] FLAG: --config-dir="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327701 4718 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327711 4718 flags.go:64] FLAG: --container-log-max-files="5" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327722 4718 flags.go:64] FLAG: --container-log-max-size="10Mi" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327731 4718 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327740 4718 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327750 4718 flags.go:64] FLAG: --containerd-namespace="k8s.io" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327760 4718 flags.go:64] FLAG: --contention-profiling="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327770 4718 
flags.go:64] FLAG: --cpu-cfs-quota="true" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327779 4718 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327791 4718 flags.go:64] FLAG: --cpu-manager-policy="none" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327799 4718 flags.go:64] FLAG: --cpu-manager-policy-options="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327811 4718 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327820 4718 flags.go:64] FLAG: --enable-controller-attach-detach="true" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327829 4718 flags.go:64] FLAG: --enable-debugging-handlers="true" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327838 4718 flags.go:64] FLAG: --enable-load-reader="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327847 4718 flags.go:64] FLAG: --enable-server="true" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327856 4718 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327868 4718 flags.go:64] FLAG: --event-burst="100" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327877 4718 flags.go:64] FLAG: --event-qps="50" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327886 4718 flags.go:64] FLAG: --event-storage-age-limit="default=0" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327895 4718 flags.go:64] FLAG: --event-storage-event-limit="default=0" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327904 4718 flags.go:64] FLAG: --eviction-hard="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327915 4718 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327924 4718 flags.go:64] FLAG: --eviction-minimum-reclaim="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327933 4718 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327942 4718 flags.go:64] FLAG: --eviction-soft="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327951 4718 flags.go:64] FLAG: --eviction-soft-grace-period="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327960 4718 flags.go:64] FLAG: --exit-on-lock-contention="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.327997 4718 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328007 4718 flags.go:64] FLAG: --experimental-mounter-path="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328016 4718 flags.go:64] FLAG: --fail-cgroupv1="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328025 4718 flags.go:64] FLAG: --fail-swap-on="true" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328035 4718 flags.go:64] FLAG: --feature-gates="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328049 4718 flags.go:64] FLAG: --file-check-frequency="20s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328061 4718 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328073 4718 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328085 4718 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328097 4718 flags.go:64] 
FLAG: --healthz-port="10248" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328110 4718 flags.go:64] FLAG: --help="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328120 4718 flags.go:64] FLAG: --hostname-override="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328129 4718 flags.go:64] FLAG: --housekeeping-interval="10s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328141 4718 flags.go:64] FLAG: --http-check-frequency="20s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328151 4718 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328160 4718 flags.go:64] FLAG: --image-credential-provider-config="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328169 4718 flags.go:64] FLAG: --image-gc-high-threshold="85" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328178 4718 flags.go:64] FLAG: --image-gc-low-threshold="80" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328186 4718 flags.go:64] FLAG: --image-service-endpoint="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328195 4718 flags.go:64] FLAG: --kernel-memcg-notification="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328205 4718 flags.go:64] FLAG: --kube-api-burst="100" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328214 4718 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328223 4718 flags.go:64] FLAG: --kube-api-qps="50" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328232 4718 flags.go:64] FLAG: --kube-reserved="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328241 4718 flags.go:64] FLAG: --kube-reserved-cgroup="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328252 4718 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328262 4718 flags.go:64] FLAG: --kubelet-cgroups="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328270 4718 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328279 4718 flags.go:64] FLAG: --lock-file="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328288 4718 flags.go:64] FLAG: --log-cadvisor-usage="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328297 4718 flags.go:64] FLAG: --log-flush-frequency="5s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328307 4718 flags.go:64] FLAG: --log-json-info-buffer-size="0" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328320 4718 flags.go:64] FLAG: --log-json-split-stream="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328329 4718 flags.go:64] FLAG: --log-text-info-buffer-size="0" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328337 4718 flags.go:64] FLAG: --log-text-split-stream="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328347 4718 flags.go:64] FLAG: --logging-format="text" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328356 4718 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328365 4718 flags.go:64] FLAG: --make-iptables-util-chains="true" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328374 4718 flags.go:64] FLAG: --manifest-url="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328382 4718 flags.go:64] FLAG: 
--manifest-url-header="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328395 4718 flags.go:64] FLAG: --max-housekeeping-interval="15s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328405 4718 flags.go:64] FLAG: --max-open-files="1000000" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328416 4718 flags.go:64] FLAG: --max-pods="110" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328425 4718 flags.go:64] FLAG: --maximum-dead-containers="-1" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328435 4718 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328445 4718 flags.go:64] FLAG: --memory-manager-policy="None" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328454 4718 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328463 4718 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328472 4718 flags.go:64] FLAG: --node-ip="192.168.126.11" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328481 4718 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328501 4718 flags.go:64] FLAG: --node-status-max-images="50" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328511 4718 flags.go:64] FLAG: --node-status-update-frequency="10s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328521 4718 flags.go:64] FLAG: --oom-score-adj="-999" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328530 4718 flags.go:64] FLAG: --pod-cidr="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328538 4718 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328550 4718 flags.go:64] FLAG: --pod-manifest-path="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328560 4718 flags.go:64] FLAG: --pod-max-pids="-1" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328570 4718 flags.go:64] FLAG: --pods-per-core="0" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328581 4718 flags.go:64] FLAG: --port="10250" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328594 4718 flags.go:64] FLAG: --protect-kernel-defaults="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328605 4718 flags.go:64] FLAG: --provider-id="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328618 4718 flags.go:64] FLAG: --qos-reserved="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328630 4718 flags.go:64] FLAG: --read-only-port="10255" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328642 4718 flags.go:64] FLAG: --register-node="true" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328653 4718 flags.go:64] FLAG: --register-schedulable="true" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328664 4718 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328683 4718 flags.go:64] FLAG: --registry-burst="10" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328695 4718 flags.go:64] FLAG: --registry-qps="5" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328707 4718 flags.go:64] FLAG: 
--reserved-cpus="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328716 4718 flags.go:64] FLAG: --reserved-memory="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328728 4718 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328737 4718 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328746 4718 flags.go:64] FLAG: --rotate-certificates="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328755 4718 flags.go:64] FLAG: --rotate-server-certificates="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328764 4718 flags.go:64] FLAG: --runonce="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328773 4718 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328782 4718 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328791 4718 flags.go:64] FLAG: --seccomp-default="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328801 4718 flags.go:64] FLAG: --serialize-image-pulls="true" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328811 4718 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328820 4718 flags.go:64] FLAG: --storage-driver-db="cadvisor" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328829 4718 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328838 4718 flags.go:64] FLAG: --storage-driver-password="root" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328848 4718 flags.go:64] FLAG: --storage-driver-secure="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328857 4718 flags.go:64] FLAG: --storage-driver-table="stats" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328865 4718 flags.go:64] FLAG: --storage-driver-user="root" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328874 4718 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328884 4718 flags.go:64] FLAG: --sync-frequency="1m0s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328893 4718 flags.go:64] FLAG: --system-cgroups="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328901 4718 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328917 4718 flags.go:64] FLAG: --system-reserved-cgroup="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328926 4718 flags.go:64] FLAG: --tls-cert-file="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328935 4718 flags.go:64] FLAG: --tls-cipher-suites="[]" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328946 4718 flags.go:64] FLAG: --tls-min-version="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328954 4718 flags.go:64] FLAG: --tls-private-key-file="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.328963 4718 flags.go:64] FLAG: --topology-manager-policy="none" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.329009 4718 flags.go:64] FLAG: --topology-manager-policy-options="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.329020 4718 flags.go:64] FLAG: --topology-manager-scope="container" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.329030 4718 flags.go:64] FLAG: 
--v="2" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.329044 4718 flags.go:64] FLAG: --version="false" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.329058 4718 flags.go:64] FLAG: --vmodule="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.329071 4718 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.329085 4718 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329361 4718 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329371 4718 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329380 4718 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329388 4718 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329399 4718 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329439 4718 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329449 4718 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329458 4718 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329468 4718 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329476 4718 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329485 4718 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329494 4718 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329503 4718 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329511 4718 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329520 4718 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329528 4718 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329536 4718 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329545 4718 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329554 4718 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329562 4718 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329571 4718 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329580 4718 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 24 08:35:28 crc 
kubenswrapper[4718]: W1124 08:35:28.329588 4718 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329597 4718 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329605 4718 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329613 4718 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329620 4718 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329629 4718 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329639 4718 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329649 4718 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329659 4718 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329668 4718 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329677 4718 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329685 4718 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329693 4718 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329701 4718 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329711 4718 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329720 4718 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329729 4718 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329738 4718 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329747 4718 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329757 4718 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329766 4718 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329774 4718 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329783 4718 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329791 4718 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329798 4718 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 
08:35:28.329807 4718 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329815 4718 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329823 4718 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329831 4718 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329838 4718 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329852 4718 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329861 4718 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329868 4718 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329885 4718 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329894 4718 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329902 4718 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329910 4718 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329918 4718 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329926 4718 feature_gate.go:330] unrecognized feature gate: Example Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329933 4718 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329942 4718 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329952 4718 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329962 4718 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.329998 4718 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.330008 4718 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.330017 4718 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.330026 4718 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.330036 4718 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.330046 4718 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.330075 4718 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.346653 4718 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.347045 4718 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347183 4718 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347196 4718 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347207 4718 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347217 4718 feature_gate.go:330] unrecognized feature gate: Example Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347227 4718 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347238 4718 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347251 4718 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
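The "feature gates: {map[...]}" summary above records the gate settings the kubelet actually resolved after warning about the names it did not recognize. A minimal, self-contained sketch for turning such a summary line into a dictionary (the example string below is an abbreviated copy of that summary, not new data):

import re

def parse_feature_gates(line):
    """Parse a kubelet 'feature gates: {map[...]}' log line into {name: bool}."""
    m = re.search(r"feature gates: \{map\[(.*?)\]\}", line)
    if not m:
        return {}
    gates = {}
    for pair in m.group(1).split():
        name, _, value = pair.partition(":")
        gates[name] = (value == "true")
    return gates

# Abbreviated copy of the summary logged above.
example = ("feature gates: {map[CloudDualStackNodeIPs:true "
           "DisableKubeletCloudCredentialProviders:true KMSv1:true "
           "NodeSwap:false ValidatingAdmissionPolicy:true "
           "VolumeAttributesClass:false]}")
print(parse_feature_gates(example))

A dictionary like this is convenient for diffing the effective gate set between kubelet restarts or between nodes.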
Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347263 4718 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347272 4718 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347282 4718 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347291 4718 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347300 4718 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347309 4718 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347318 4718 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347326 4718 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347334 4718 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347342 4718 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347350 4718 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347359 4718 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347367 4718 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347376 4718 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347384 4718 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347392 4718 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347400 4718 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347408 4718 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347416 4718 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347424 4718 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347451 4718 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347460 4718 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347469 4718 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347477 4718 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347486 4718 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347494 4718 feature_gate.go:330] unrecognized feature 
gate: AWSClusterHostedDNS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347502 4718 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347510 4718 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347518 4718 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347526 4718 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347535 4718 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347543 4718 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347552 4718 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347561 4718 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347572 4718 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347584 4718 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347594 4718 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347605 4718 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347615 4718 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347624 4718 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347633 4718 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347642 4718 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347651 4718 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347660 4718 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347668 4718 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347676 4718 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347687 4718 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347697 4718 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347707 4718 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347716 4718 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347725 4718 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347733 4718 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347741 4718 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347749 4718 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347757 4718 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347765 4718 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347773 4718 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347781 4718 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347790 4718 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347797 4718 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347805 4718 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347814 4718 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347822 4718 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.347829 4718 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.347843 4718 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348119 4718 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348136 4718 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348147 4718 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
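The long runs of "unrecognized feature gate" warnings repeat because the same gate list is evaluated several times during startup, as the successive timestamped blocks above show. A minimal sketch, again assuming a local copy saved as "kubelet.log" (assumed name), that reduces those runs to one count per gate name:

import re
from collections import Counter

UNRECOGNIZED = re.compile(r"unrecognized feature gate: (\S+)")

def unrecognized_gates(path="kubelet.log"):
    """Count how often each unrecognized gate name was warned about."""
    counts = Counter()
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            counts.update(UNRECOGNIZED.findall(line))
    return counts

if __name__ == "__main__":
    for gate, n in sorted(unrecognized_gates().items()):
        print(f"{gate}: {n}")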
Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348160 4718 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348169 4718 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348177 4718 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348187 4718 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348197 4718 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348205 4718 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348214 4718 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348222 4718 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348230 4718 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348239 4718 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348247 4718 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348258 4718 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348268 4718 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348276 4718 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348285 4718 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348293 4718 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348302 4718 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348310 4718 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348318 4718 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348326 4718 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348334 4718 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348344 4718 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348355 4718 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348363 4718 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348372 4718 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348380 4718 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348387 4718 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348395 4718 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348403 4718 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348410 4718 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348418 4718 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348426 4718 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348434 4718 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348442 4718 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348453 4718 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348463 4718 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348471 4718 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348478 4718 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348487 4718 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348494 4718 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348502 4718 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348510 4718 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348518 4718 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348527 4718 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348535 4718 feature_gate.go:330] unrecognized feature gate: Example Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348543 4718 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348551 4718 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348558 4718 feature_gate.go:330] unrecognized feature gate: 
ClusterAPIInstallIBMCloud Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348566 4718 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348574 4718 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348581 4718 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348589 4718 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348597 4718 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348604 4718 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348612 4718 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348622 4718 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348632 4718 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348642 4718 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348650 4718 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348659 4718 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348668 4718 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348676 4718 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348684 4718 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348692 4718 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348699 4718 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348708 4718 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348717 4718 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.348725 4718 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.348739 4718 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.349084 4718 server.go:940] "Client rotation is on, will bootstrap in background" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.355908 4718 
bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.356101 4718 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.358332 4718 server.go:997] "Starting client certificate rotation" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.358379 4718 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.359370 4718 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-12 20:28:32.966262052 +0000 UTC Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.359448 4718 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1187h53m4.606817316s for next certificate rotation Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.395489 4718 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.399856 4718 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.421773 4718 log.go:25] "Validated CRI v1 runtime API" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.463760 4718 log.go:25] "Validated CRI v1 image API" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.466496 4718 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.474841 4718 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-11-24-08-31-06-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.474917 4718 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.508571 4718 manager.go:217] Machine: {Timestamp:2025-11-24 08:35:28.505298005 +0000 UTC m=+0.621588929 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:8f96cf3c-132c-4425-a289-ea01a722de47 BootID:b19cdd33-5845-4248-9fcf-160eb228d941 Filesystems:[{Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 
HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:20:1c:8e Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:20:1c:8e Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:c6:d8:7e Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:a0:67:d2 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:90:ba:4e Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:e4:36:be Speed:-1 Mtu:1496} {Name:eth10 MacAddress:a2:38:c3:f6:71:44 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:22:4b:60:a4:56:aa Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 
Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.508959 4718 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.509239 4718 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.511430 4718 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.511635 4718 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.511677 4718 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.511931 4718 topology_manager.go:138] "Creating topology manager with none policy" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.511945 4718 container_manager_linux.go:303] "Creating device plugin manager" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.512562 4718 manager.go:142] "Creating 
Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.512597 4718 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.513374 4718 state_mem.go:36] "Initialized new in-memory state store" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.513480 4718 server.go:1245] "Using root directory" path="/var/lib/kubelet" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.517824 4718 kubelet.go:418] "Attempting to sync node with API server" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.517853 4718 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.517873 4718 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.517889 4718 kubelet.go:324] "Adding apiserver pod source" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.517904 4718 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.524339 4718 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.524710 4718 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Nov 24 08:35:28 crc kubenswrapper[4718]: E1124 08:35:28.524809 4718 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.524764 4718 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Nov 24 08:35:28 crc kubenswrapper[4718]: E1124 08:35:28.524890 4718 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.525550 4718 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.527865 4718 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.529862 4718 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.529909 4718 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.529931 4718 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.529951 4718 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.530013 4718 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.530044 4718 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.530064 4718 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.530092 4718 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.530111 4718 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.530129 4718 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.530165 4718 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.530182 4718 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.531105 4718 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.531957 4718 server.go:1280] "Started kubelet" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.532336 4718 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.532393 4718 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.533295 4718 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.533909 4718 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Nov 24 08:35:28 crc systemd[1]: Started Kubernetes Kubelet. 
Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.536452 4718 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.536761 4718 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.536904 4718 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 09:39:03.785078311 +0000 UTC Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.536960 4718 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 673h3m35.248122013s for next certificate rotation Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.537168 4718 server.go:460] "Adding debug handlers to kubelet server" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.537336 4718 volume_manager.go:287] "The desired_state_of_world populator starts" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.537366 4718 volume_manager.go:289] "Starting Kubelet Volume Manager" Nov 24 08:35:28 crc kubenswrapper[4718]: E1124 08:35:28.537568 4718 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.538051 4718 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Nov 24 08:35:28 crc kubenswrapper[4718]: E1124 08:35:28.538684 4718 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="200ms" Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.539024 4718 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Nov 24 08:35:28 crc kubenswrapper[4718]: E1124 08:35:28.539179 4718 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Nov 24 08:35:28 crc kubenswrapper[4718]: E1124 08:35:28.543226 4718 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.220:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187ae465786860b3 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-24 08:35:28.531906739 +0000 UTC m=+0.648197683,LastTimestamp:2025-11-24 08:35:28.531906739 +0000 UTC m=+0.648197683,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.546805 4718 factory.go:55] Registering systemd factory Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.546854 4718 factory.go:221] Registration of the 
systemd container factory successfully Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.547210 4718 factory.go:153] Registering CRI-O factory Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.547244 4718 factory.go:221] Registration of the crio container factory successfully Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.547334 4718 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.547379 4718 factory.go:103] Registering Raw factory Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.547403 4718 manager.go:1196] Started watching for new ooms in manager Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.548297 4718 manager.go:319] Starting recovery of all containers Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.554788 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.554883 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.554902 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.554927 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.554942 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.554959 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555028 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555065 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Nov 24 08:35:28 crc 
kubenswrapper[4718]: I1124 08:35:28.555086 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555103 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555123 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555141 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555159 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555182 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555201 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555218 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555242 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555263 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555284 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555303 4718 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555319 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555338 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555357 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555373 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555392 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555411 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555435 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555454 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555471 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555503 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555532 4718 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555550 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555614 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555636 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555654 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555676 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555698 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555715 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555734 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555752 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555769 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555788 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555809 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555827 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555847 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555869 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555890 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555911 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555932 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.555951 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556096 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556116 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556140 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556160 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556179 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556206 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556231 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556297 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556312 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556325 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556339 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556351 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556365 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556378 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556395 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556408 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556424 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556440 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556456 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556471 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556486 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556500 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556516 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556531 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556545 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556560 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556575 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556589 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556601 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556615 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556629 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556642 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556656 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556670 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556683 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556698 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" 
volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556714 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556731 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556746 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556763 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556777 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556794 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556810 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556827 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556843 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556859 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556877 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556892 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556907 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556922 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556936 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556951 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.556965 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557009 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557047 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557068 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557087 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557107 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557127 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557151 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557175 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557196 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557214 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557234 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557255 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557273 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557291 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557309 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557323 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" 
volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557337 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557352 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557369 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557383 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557399 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557412 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557426 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557439 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557458 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557474 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557488 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557503 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557519 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557532 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557545 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557557 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557570 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557584 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557596 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557609 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557623 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557689 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" 
volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557705 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557722 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557739 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557756 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557776 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557792 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557806 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557823 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557838 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557856 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557869 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557887 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557902 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557917 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557931 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.557945 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.561963 4718 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562050 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562078 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562110 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562139 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562163 4718 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562199 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562222 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562241 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562258 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562271 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562316 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562335 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562350 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562365 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562385 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562400 4718 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562419 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562434 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562453 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562469 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562485 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562503 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562521 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562537 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562599 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562619 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562639 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562655 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562671 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562690 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562708 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562727 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562743 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562758 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562777 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562793 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562809 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562827 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562846 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562864 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562879 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562895 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562914 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562931 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562947 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.562966 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.563004 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.563020 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.563037 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" 
volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.563054 4718 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.563071 4718 reconstruct.go:97] "Volume reconstruction finished" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.563087 4718 reconciler.go:26] "Reconciler: start to sync state" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.571164 4718 manager.go:324] Recovery completed Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.584259 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.586233 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.586299 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.586314 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.587372 4718 cpu_manager.go:225] "Starting CPU manager" policy="none" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.587492 4718 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.587581 4718 state_mem.go:36] "Initialized new in-memory state store" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.591258 4718 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.595021 4718 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.595097 4718 status_manager.go:217] "Starting to sync pod status with apiserver" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.595136 4718 kubelet.go:2335] "Starting kubelet main sync loop" Nov 24 08:35:28 crc kubenswrapper[4718]: E1124 08:35:28.595203 4718 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Nov 24 08:35:28 crc kubenswrapper[4718]: W1124 08:35:28.598066 4718 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Nov 24 08:35:28 crc kubenswrapper[4718]: E1124 08:35:28.598162 4718 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.602701 4718 policy_none.go:49] "None policy: Start" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.603842 4718 memory_manager.go:170] "Starting memorymanager" policy="None" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.603879 4718 state_mem.go:35] "Initializing new in-memory state store" Nov 24 08:35:28 crc kubenswrapper[4718]: E1124 08:35:28.638541 4718 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.662938 4718 manager.go:334] "Starting Device Plugin manager" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.663032 4718 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.663075 4718 server.go:79] "Starting device plugin registration server" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.663814 4718 eviction_manager.go:189] "Eviction manager: starting control loop" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.663840 4718 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.663999 4718 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.664150 4718 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.664172 4718 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Nov 24 08:35:28 crc kubenswrapper[4718]: E1124 08:35:28.672042 4718 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.695889 4718 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Nov 24 08:35:28 crc kubenswrapper[4718]: 
I1124 08:35:28.696045 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.697430 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.697482 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.697510 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.697757 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.697993 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.698035 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.698899 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.698949 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.698965 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.699067 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.699092 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.699102 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.699296 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.699460 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.699514 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.700196 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.700228 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.700241 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.700260 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.700281 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.700291 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.700431 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.700670 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.700744 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.701202 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.701235 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.701257 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.701405 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.701449 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.701499 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.701594 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.701634 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.701649 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.702271 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.702313 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.702330 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.702502 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.702537 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.703144 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.703173 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.703217 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.703299 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.703348 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.703363 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:28 crc kubenswrapper[4718]: E1124 08:35:28.739771 4718 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="400ms" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.764378 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.765092 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.765151 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.765178 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.765202 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.765274 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.765324 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.765349 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.765484 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.765516 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.765571 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.765609 4718 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.765655 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.765682 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.765800 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.765863 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.766436 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.766484 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.766497 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.766577 4718 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 08:35:28 crc kubenswrapper[4718]: E1124 08:35:28.767180 4718 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.220:6443: connect: connection refused" node="crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867135 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867242 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867299 4718 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867326 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867396 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867427 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867466 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867496 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867424 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867495 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867630 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867662 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867550 4718 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867718 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867687 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867552 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867762 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867706 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867763 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867828 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867894 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867872 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.867911 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.868015 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.868213 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.868277 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.868285 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.868316 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.868370 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.868488 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.967734 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.969398 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.969442 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.969451 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:28 crc kubenswrapper[4718]: I1124 08:35:28.969481 4718 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 08:35:28 crc kubenswrapper[4718]: E1124 08:35:28.969877 4718 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.220:6443: connect: connection refused" node="crc" Nov 24 08:35:29 crc kubenswrapper[4718]: I1124 08:35:29.037891 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 08:35:29 crc kubenswrapper[4718]: I1124 08:35:29.049535 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Nov 24 08:35:29 crc kubenswrapper[4718]: I1124 08:35:29.066463 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:35:29 crc kubenswrapper[4718]: I1124 08:35:29.080473 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 08:35:29 crc kubenswrapper[4718]: I1124 08:35:29.085277 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 08:35:29 crc kubenswrapper[4718]: W1124 08:35:29.091422 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-a13847c51dd8bceca16f30d47b92d9a698118997ba08243183efb9fff208d7f2 WatchSource:0}: Error finding container a13847c51dd8bceca16f30d47b92d9a698118997ba08243183efb9fff208d7f2: Status 404 returned error can't find the container with id a13847c51dd8bceca16f30d47b92d9a698118997ba08243183efb9fff208d7f2 Nov 24 08:35:29 crc kubenswrapper[4718]: W1124 08:35:29.094446 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-068c735082d270a71ecc40c9751e9928a5e57243a24b4de20c89f302fcd71fe8 WatchSource:0}: Error finding container 068c735082d270a71ecc40c9751e9928a5e57243a24b4de20c89f302fcd71fe8: Status 404 returned error can't find the container with id 068c735082d270a71ecc40c9751e9928a5e57243a24b4de20c89f302fcd71fe8 Nov 24 08:35:29 crc kubenswrapper[4718]: W1124 08:35:29.106772 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-e2d6f88dbf6234b739d27b58e9be39d4e10ddad90de6f1c4db0a87adf8e84f5e WatchSource:0}: Error finding container e2d6f88dbf6234b739d27b58e9be39d4e10ddad90de6f1c4db0a87adf8e84f5e: Status 404 returned error can't find the container with id e2d6f88dbf6234b739d27b58e9be39d4e10ddad90de6f1c4db0a87adf8e84f5e Nov 24 08:35:29 crc kubenswrapper[4718]: W1124 08:35:29.108773 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-40be58dc623689d61e11567a210d8c711fffd8486ccdfedaed5e27b82b45b1f3 WatchSource:0}: Error finding container 
40be58dc623689d61e11567a210d8c711fffd8486ccdfedaed5e27b82b45b1f3: Status 404 returned error can't find the container with id 40be58dc623689d61e11567a210d8c711fffd8486ccdfedaed5e27b82b45b1f3 Nov 24 08:35:29 crc kubenswrapper[4718]: W1124 08:35:29.115260 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-7fb26a0251ef9a0e49d7e02c795f7323a7d352ff76bb81900f474398ae635245 WatchSource:0}: Error finding container 7fb26a0251ef9a0e49d7e02c795f7323a7d352ff76bb81900f474398ae635245: Status 404 returned error can't find the container with id 7fb26a0251ef9a0e49d7e02c795f7323a7d352ff76bb81900f474398ae635245 Nov 24 08:35:29 crc kubenswrapper[4718]: E1124 08:35:29.140932 4718 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="800ms" Nov 24 08:35:29 crc kubenswrapper[4718]: I1124 08:35:29.369961 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:29 crc kubenswrapper[4718]: I1124 08:35:29.372111 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:29 crc kubenswrapper[4718]: I1124 08:35:29.372151 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:29 crc kubenswrapper[4718]: I1124 08:35:29.372162 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:29 crc kubenswrapper[4718]: I1124 08:35:29.372192 4718 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 08:35:29 crc kubenswrapper[4718]: E1124 08:35:29.372740 4718 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.220:6443: connect: connection refused" node="crc" Nov 24 08:35:29 crc kubenswrapper[4718]: W1124 08:35:29.531888 4718 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Nov 24 08:35:29 crc kubenswrapper[4718]: E1124 08:35:29.532078 4718 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Nov 24 08:35:29 crc kubenswrapper[4718]: I1124 08:35:29.538471 4718 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Nov 24 08:35:29 crc kubenswrapper[4718]: I1124 08:35:29.603402 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7fb26a0251ef9a0e49d7e02c795f7323a7d352ff76bb81900f474398ae635245"} Nov 24 08:35:29 crc 
kubenswrapper[4718]: I1124 08:35:29.604504 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"40be58dc623689d61e11567a210d8c711fffd8486ccdfedaed5e27b82b45b1f3"} Nov 24 08:35:29 crc kubenswrapper[4718]: I1124 08:35:29.605561 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e2d6f88dbf6234b739d27b58e9be39d4e10ddad90de6f1c4db0a87adf8e84f5e"} Nov 24 08:35:29 crc kubenswrapper[4718]: I1124 08:35:29.606575 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a13847c51dd8bceca16f30d47b92d9a698118997ba08243183efb9fff208d7f2"} Nov 24 08:35:29 crc kubenswrapper[4718]: I1124 08:35:29.607767 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"068c735082d270a71ecc40c9751e9928a5e57243a24b4de20c89f302fcd71fe8"} Nov 24 08:35:29 crc kubenswrapper[4718]: W1124 08:35:29.705736 4718 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Nov 24 08:35:29 crc kubenswrapper[4718]: E1124 08:35:29.705894 4718 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Nov 24 08:35:29 crc kubenswrapper[4718]: W1124 08:35:29.831697 4718 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Nov 24 08:35:29 crc kubenswrapper[4718]: E1124 08:35:29.831803 4718 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Nov 24 08:35:29 crc kubenswrapper[4718]: E1124 08:35:29.942301 4718 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="1.6s" Nov 24 08:35:29 crc kubenswrapper[4718]: W1124 08:35:29.983699 4718 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Nov 24 08:35:29 crc kubenswrapper[4718]: E1124 08:35:29.983822 4718 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list 
*v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.173349 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.176170 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.176225 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.176239 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.176992 4718 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 08:35:30 crc kubenswrapper[4718]: E1124 08:35:30.178252 4718 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.220:6443: connect: connection refused" node="crc" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.537586 4718 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.613700 4718 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c" exitCode=0 Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.613816 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c"} Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.613831 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.614924 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.615013 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.615068 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.616261 4718 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163" exitCode=0 Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.616437 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.616527 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163"} Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.617547 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.617584 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.617599 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.619542 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.620304 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.620336 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.620351 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.621007 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a"} Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.621062 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913"} Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.621077 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274"} Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.621088 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c"} Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.621025 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.622241 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.622272 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.622283 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.623310 4718 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" 
containerID="b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028" exitCode=0 Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.623468 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028"} Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.623654 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.625058 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.625150 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.625212 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.625804 4718 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="545bc14d41432ab68f798e54e78640e19d1fc6186a1710e171eb309b504f3e77" exitCode=0 Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.625880 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.625935 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"545bc14d41432ab68f798e54e78640e19d1fc6186a1710e171eb309b504f3e77"} Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.626888 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.626921 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.626930 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:30 crc kubenswrapper[4718]: I1124 08:35:30.714642 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.537548 4718 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Nov 24 08:35:31 crc kubenswrapper[4718]: E1124 08:35:31.543605 4718 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="3.2s" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.631568 4718 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db" exitCode=0 Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.631657 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db"} Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.631692 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.632514 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.632558 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.632569 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.634266 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"59a58ada5e1c2a88df71a98a5d52c0b32aa00c68b701b5cd1711d39894553754"} Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.634308 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.635217 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.635348 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.635375 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.636563 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.636702 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e946f83395434468ac3f7b67789108982e4341984166b966312fef9bcb1a9d48"} Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.636734 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"cb96b198cd151b3cd4336ecbb27650bb3104c4c68d815ec5b911ee0aca16da2b"} Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.636747 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b2447b7b95e023d5250d7f7f92372712537ac3698312da71d6c6c70b23bccf1e"} Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.637415 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.637443 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.637457 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:31 crc 
kubenswrapper[4718]: I1124 08:35:31.639249 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da"} Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.639286 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.639293 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59"} Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.639310 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3"} Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.639324 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb"} Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.639892 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.639929 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.639942 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.778667 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.781268 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.781324 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.781337 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:31 crc kubenswrapper[4718]: I1124 08:35:31.781374 4718 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 08:35:31 crc kubenswrapper[4718]: E1124 08:35:31.781920 4718 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.220:6443: connect: connection refused" node="crc" Nov 24 08:35:31 crc kubenswrapper[4718]: W1124 08:35:31.978070 4718 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Nov 24 08:35:31 crc kubenswrapper[4718]: E1124 08:35:31.978168 4718 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get 
\"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.550592 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.646697 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.649744 4718 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6f2c769ba6c0e50e87f46ec567abcacdaa958e75f7c76edab8a19c4d67fb80c9" exitCode=255 Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.649859 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"6f2c769ba6c0e50e87f46ec567abcacdaa958e75f7c76edab8a19c4d67fb80c9"} Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.649943 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.651605 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.651678 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.651698 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.652953 4718 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff" exitCode=0 Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.653074 4718 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.653122 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.653134 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.653161 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.653366 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff"} Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.653524 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.654216 4718 scope.go:117] "RemoveContainer" containerID="6f2c769ba6c0e50e87f46ec567abcacdaa958e75f7c76edab8a19c4d67fb80c9" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.654902 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.654932 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.654943 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.655102 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.655135 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.655138 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.655156 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.655183 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.655208 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.655153 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.655298 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:32 crc kubenswrapper[4718]: I1124 08:35:32.655322 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.558340 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.658889 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.661356 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b"} Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.661660 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.663534 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.663618 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.663647 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.667604 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:33 crc kubenswrapper[4718]: 
I1124 08:35:33.667631 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.667698 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b"} Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.667769 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76"} Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.667814 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc"} Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.667842 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9"} Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.667868 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b"} Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.668728 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.668761 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.668773 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.668896 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.668938 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:33 crc kubenswrapper[4718]: I1124 08:35:33.668951 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:34 crc kubenswrapper[4718]: I1124 08:35:34.670601 4718 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 08:35:34 crc kubenswrapper[4718]: I1124 08:35:34.670684 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:34 crc kubenswrapper[4718]: I1124 08:35:34.670614 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:34 crc kubenswrapper[4718]: I1124 08:35:34.671834 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:34 crc kubenswrapper[4718]: I1124 08:35:34.671878 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:34 crc kubenswrapper[4718]: I1124 08:35:34.671893 4718 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:34 crc kubenswrapper[4718]: I1124 08:35:34.672452 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:34 crc kubenswrapper[4718]: I1124 08:35:34.672488 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:34 crc kubenswrapper[4718]: I1124 08:35:34.672502 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:34 crc kubenswrapper[4718]: I1124 08:35:34.752733 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Nov 24 08:35:34 crc kubenswrapper[4718]: I1124 08:35:34.982960 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:34 crc kubenswrapper[4718]: I1124 08:35:34.985042 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:34 crc kubenswrapper[4718]: I1124 08:35:34.985113 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:34 crc kubenswrapper[4718]: I1124 08:35:34.985127 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:34 crc kubenswrapper[4718]: I1124 08:35:34.985164 4718 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 08:35:35 crc kubenswrapper[4718]: I1124 08:35:35.721089 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:35 crc kubenswrapper[4718]: I1124 08:35:35.722033 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:35 crc kubenswrapper[4718]: I1124 08:35:35.722077 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:35 crc kubenswrapper[4718]: I1124 08:35:35.722089 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:36 crc kubenswrapper[4718]: I1124 08:35:36.027958 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:35:36 crc kubenswrapper[4718]: I1124 08:35:36.028283 4718 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 08:35:36 crc kubenswrapper[4718]: I1124 08:35:36.028359 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:36 crc kubenswrapper[4718]: I1124 08:35:36.029731 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:36 crc kubenswrapper[4718]: I1124 08:35:36.029780 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:36 crc kubenswrapper[4718]: I1124 08:35:36.029791 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:37 crc kubenswrapper[4718]: I1124 08:35:37.424685 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 08:35:37 crc kubenswrapper[4718]: I1124 08:35:37.424854 4718 kubelet_node_status.go:401] 
"Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:37 crc kubenswrapper[4718]: I1124 08:35:37.425812 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:37 crc kubenswrapper[4718]: I1124 08:35:37.425840 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:37 crc kubenswrapper[4718]: I1124 08:35:37.425851 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:37 crc kubenswrapper[4718]: I1124 08:35:37.918497 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:35:37 crc kubenswrapper[4718]: I1124 08:35:37.918773 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:37 crc kubenswrapper[4718]: I1124 08:35:37.920316 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:37 crc kubenswrapper[4718]: I1124 08:35:37.920375 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:37 crc kubenswrapper[4718]: I1124 08:35:37.920393 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:38 crc kubenswrapper[4718]: I1124 08:35:38.038595 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 08:35:38 crc kubenswrapper[4718]: I1124 08:35:38.038835 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:38 crc kubenswrapper[4718]: I1124 08:35:38.040931 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:38 crc kubenswrapper[4718]: I1124 08:35:38.041024 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:38 crc kubenswrapper[4718]: I1124 08:35:38.041067 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:38 crc kubenswrapper[4718]: I1124 08:35:38.044414 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 08:35:38 crc kubenswrapper[4718]: I1124 08:35:38.191445 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Nov 24 08:35:38 crc kubenswrapper[4718]: I1124 08:35:38.191693 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:38 crc kubenswrapper[4718]: I1124 08:35:38.192867 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:38 crc kubenswrapper[4718]: I1124 08:35:38.192897 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:38 crc kubenswrapper[4718]: I1124 08:35:38.192908 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:38 crc kubenswrapper[4718]: E1124 08:35:38.672549 4718 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get 
node info: node \"crc\" not found" Nov 24 08:35:38 crc kubenswrapper[4718]: I1124 08:35:38.731020 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:38 crc kubenswrapper[4718]: I1124 08:35:38.734363 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:38 crc kubenswrapper[4718]: I1124 08:35:38.735053 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:38 crc kubenswrapper[4718]: I1124 08:35:38.735113 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:39 crc kubenswrapper[4718]: I1124 08:35:39.724095 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 08:35:39 crc kubenswrapper[4718]: I1124 08:35:39.734622 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:39 crc kubenswrapper[4718]: I1124 08:35:39.735786 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:39 crc kubenswrapper[4718]: I1124 08:35:39.735832 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:39 crc kubenswrapper[4718]: I1124 08:35:39.735848 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:39 crc kubenswrapper[4718]: I1124 08:35:39.739409 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 08:35:40 crc kubenswrapper[4718]: I1124 08:35:40.737405 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:40 crc kubenswrapper[4718]: I1124 08:35:40.738647 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:40 crc kubenswrapper[4718]: I1124 08:35:40.738747 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:40 crc kubenswrapper[4718]: I1124 08:35:40.738808 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:42 crc kubenswrapper[4718]: W1124 08:35:42.498097 4718 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Nov 24 08:35:42 crc kubenswrapper[4718]: I1124 08:35:42.498208 4718 trace.go:236] Trace[1965038844]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (24-Nov-2025 08:35:32.495) (total time: 10002ms): Nov 24 08:35:42 crc kubenswrapper[4718]: Trace[1965038844]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (08:35:42.498) Nov 24 08:35:42 crc kubenswrapper[4718]: Trace[1965038844]: [10.002303875s] [10.002303875s] END Nov 24 08:35:42 crc kubenswrapper[4718]: E1124 08:35:42.498231 4718 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch 
*v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Nov 24 08:35:42 crc kubenswrapper[4718]: I1124 08:35:42.538390 4718 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Nov 24 08:35:42 crc kubenswrapper[4718]: I1124 08:35:42.599881 4718 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403} Nov 24 08:35:42 crc kubenswrapper[4718]: I1124 08:35:42.599988 4718 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Nov 24 08:35:42 crc kubenswrapper[4718]: I1124 08:35:42.607495 4718 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403} Nov 24 08:35:42 crc kubenswrapper[4718]: I1124 08:35:42.607558 4718 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Nov 24 08:35:42 crc kubenswrapper[4718]: I1124 08:35:42.725033 4718 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Nov 24 08:35:42 crc kubenswrapper[4718]: I1124 08:35:42.725108 4718 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 24 08:35:46 crc kubenswrapper[4718]: I1124 08:35:46.032421 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:35:46 crc kubenswrapper[4718]: I1124 08:35:46.032660 4718 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:46 crc kubenswrapper[4718]: I1124 08:35:46.033137 4718 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Nov 24 08:35:46 crc kubenswrapper[4718]: I1124 08:35:46.033192 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Nov 24 08:35:46 crc kubenswrapper[4718]: I1124 08:35:46.033728 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:46 crc kubenswrapper[4718]: I1124 08:35:46.033756 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:46 crc kubenswrapper[4718]: I1124 08:35:46.033766 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:46 crc kubenswrapper[4718]: I1124 08:35:46.035843 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:35:46 crc kubenswrapper[4718]: I1124 08:35:46.475139 4718 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Nov 24 08:35:46 crc kubenswrapper[4718]: I1124 08:35:46.755288 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:46 crc kubenswrapper[4718]: I1124 08:35:46.755752 4718 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Nov 24 08:35:46 crc kubenswrapper[4718]: I1124 08:35:46.755798 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Nov 24 08:35:46 crc kubenswrapper[4718]: I1124 08:35:46.756378 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:46 crc kubenswrapper[4718]: I1124 08:35:46.756419 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:46 crc kubenswrapper[4718]: I1124 08:35:46.756431 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:47 crc kubenswrapper[4718]: E1124 08:35:47.575162 4718 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Nov 24 08:35:47 crc kubenswrapper[4718]: I1124 08:35:47.575239 4718 trace.go:236] 
Trace[1071920863]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (24-Nov-2025 08:35:32.844) (total time: 14730ms): Nov 24 08:35:47 crc kubenswrapper[4718]: Trace[1071920863]: ---"Objects listed" error: 14730ms (08:35:47.575) Nov 24 08:35:47 crc kubenswrapper[4718]: Trace[1071920863]: [14.730653678s] [14.730653678s] END Nov 24 08:35:47 crc kubenswrapper[4718]: I1124 08:35:47.575307 4718 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Nov 24 08:35:47 crc kubenswrapper[4718]: I1124 08:35:47.576782 4718 trace.go:236] Trace[1422931570]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (24-Nov-2025 08:35:33.033) (total time: 14543ms): Nov 24 08:35:47 crc kubenswrapper[4718]: Trace[1422931570]: ---"Objects listed" error: 14543ms (08:35:47.576) Nov 24 08:35:47 crc kubenswrapper[4718]: Trace[1422931570]: [14.543198844s] [14.543198844s] END Nov 24 08:35:47 crc kubenswrapper[4718]: I1124 08:35:47.576810 4718 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Nov 24 08:35:47 crc kubenswrapper[4718]: I1124 08:35:47.577377 4718 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Nov 24 08:35:47 crc kubenswrapper[4718]: I1124 08:35:47.577417 4718 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Nov 24 08:35:47 crc kubenswrapper[4718]: E1124 08:35:47.577504 4718 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.007440 4718 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:43434->192.168.126.11:17697: read: connection reset by peer" start-of-body= Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.007515 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:43434->192.168.126.11:17697: read: connection reset by peer" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.217739 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.228913 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.530186 4718 apiserver.go:52] "Watching apiserver" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.534039 4718 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.534468 4718 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-8cfq9","openshift-etcd/etcd-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h"] Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.534944 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.535073 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.535188 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.535304 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.535311 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.535356 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.536231 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-8cfq9" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.536265 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.536491 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.536668 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.539250 4718 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.539507 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.539756 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.539819 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.539847 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.540164 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.540187 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.542135 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.542500 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.542713 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.542916 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.543882 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.544526 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.573590 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.582611 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.582673 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.582701 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.582730 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.582769 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.582820 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.582848 4718 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.582874 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.582895 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583010 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583039 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583066 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583092 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583121 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583152 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583178 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 24 08:35:48 crc 
kubenswrapper[4718]: I1124 08:35:48.583217 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583249 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583276 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583301 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583328 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583357 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583386 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583410 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584047 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584075 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: 
\"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584128 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584152 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584171 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584188 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584207 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584226 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584246 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584281 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584306 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584331 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod 
\"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584357 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584378 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584406 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584431 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584455 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584473 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584492 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584511 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584529 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584547 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584568 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585217 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585272 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585297 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585317 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585342 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585360 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585381 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585404 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585422 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585516 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585536 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585562 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585592 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585619 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585645 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585669 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585689 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585706 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585728 4718 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585748 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585766 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585843 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585866 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585884 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585902 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585924 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585944 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585987 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: 
\"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.586013 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589591 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589623 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589643 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589663 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589681 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589703 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589720 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589735 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589758 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589777 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589792 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589810 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589826 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589842 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.590173 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.590492 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.590593 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583034 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583181 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583318 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583563 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584007 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584219 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.583932 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.584815 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585172 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585202 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597038 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597053 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585562 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585816 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.588191 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.588239 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.588253 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.588263 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.588375 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.588462 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.588632 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589001 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589123 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589169 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589208 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589256 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589317 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589445 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589544 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589558 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589331 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589635 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589827 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589839 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.590021 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.590087 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.590375 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.590408 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.590615 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.590685 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:35:49.09066246 +0000 UTC m=+21.206953364 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597274 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597283 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597324 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597359 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597388 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597414 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597438 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597463 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597513 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597546 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597571 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597596 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597624 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597699 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597728 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597752 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597791 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597815 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597838 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597864 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597886 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: 
\"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597911 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597935 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597959 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.597999 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598019 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598025 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598073 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598101 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598129 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598153 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598214 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598236 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598245 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598279 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598323 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598350 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598376 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598436 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598466 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598498 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598525 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598553 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: 
\"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598579 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598611 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598637 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598664 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598695 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598719 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598743 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598771 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598795 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598819 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598873 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598899 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598931 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598963 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599005 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599031 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599055 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599111 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599139 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599165 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599191 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599216 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599241 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599343 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599376 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599404 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599456 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599485 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599510 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599536 4718 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599630 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599672 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599696 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599719 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599743 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599773 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599798 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599823 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599848 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 
08:35:48.599877 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599903 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599928 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599950 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600005 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600033 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600057 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600082 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600108 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600133 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: 
\"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600156 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600180 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600204 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600228 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600290 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600318 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600343 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600370 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600398 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600376 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600423 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600758 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600812 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600850 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600878 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.612132 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.612201 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod 
\"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.612259 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.612324 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.612347 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.615001 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.616111 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.616195 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzs5j\" (UniqueName: \"kubernetes.io/projected/ae67d599-f070-4cc9-a934-d546e1d84e7d-kube-api-access-wzs5j\") pod \"node-resolver-8cfq9\" (UID: \"ae67d599-f070-4cc9-a934-d546e1d84e7d\") " pod="openshift-dns/node-resolver-8cfq9" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.616439 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598466 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.598735 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.590666 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.590703 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.590784 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.590890 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.590923 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.590501 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.590927 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.591048 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.591940 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.592095 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.592116 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.592650 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.592612 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.592683 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.593018 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.593102 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.593179 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.593683 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.593736 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.593955 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.594343 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.594923 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.595137 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). 
InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.595307 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.595421 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.595702 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.595734 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.595891 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.596164 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.596406 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.596642 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.596719 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599252 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.599392 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600093 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.600413 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.589581 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.610448 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.610448 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.610690 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.610760 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.610914 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.611148 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.611199 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.611251 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.611738 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.585407 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.611842 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.611910 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.612678 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.612703 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.613174 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.613340 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.613770 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). 
InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.613795 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.613948 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.614111 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.614404 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.614489 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.614543 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.615143 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.615426 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.615567 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.615690 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.616393 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.616404 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.616416 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.616430 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.616755 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.616836 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.617036 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.617090 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.617081 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.617097 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.617656 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.619848 4718 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.622270 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.622352 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.622446 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.622481 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.623844 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.623964 4718 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.624063 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/ae67d599-f070-4cc9-a934-d546e1d84e7d-hosts-file\") pod \"node-resolver-8cfq9\" (UID: \"ae67d599-f070-4cc9-a934-d546e1d84e7d\") " pod="openshift-dns/node-resolver-8cfq9" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.624100 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.624156 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod 
\"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.624185 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.624381 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.626043 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.628085 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 08:35:49.128053351 +0000 UTC m=+21.244344255 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.628090 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.628125 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.628249 4718 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.628934 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.629097 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.629673 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.631417 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.631993 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.641486 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.632000 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.633169 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.634382 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.635433 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.636752 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.637047 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.640144 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 08:35:49.140110211 +0000 UTC m=+21.256401115 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648079 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648181 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.640360 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648614 4718 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648681 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648699 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648718 4718 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648735 4718 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648751 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648767 4718 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648780 4718 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648792 4718 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648808 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc 
kubenswrapper[4718]: I1124 08:35:48.648802 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648824 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648840 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648853 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648894 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648909 4718 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648927 4718 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648939 4718 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648953 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648985 4718 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.648999 4718 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649080 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649095 4718 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649108 4718 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649121 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649137 4718 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649150 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649165 4718 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649179 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649192 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649205 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649218 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649231 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649245 4718 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649259 4718 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649271 4718 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649284 4718 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649298 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649311 4718 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649324 4718 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649339 4718 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649353 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.649366 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.629915 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.651476 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.651801 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.652191 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.652237 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.652254 4718 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.652329 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 08:35:49.152305694 +0000 UTC m=+21.268596788 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.652197 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.652368 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.652381 4718 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.652412 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 08:35:49.152403397 +0000 UTC m=+21.268694521 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.641432 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.652414 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.641571 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.641796 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.641909 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.641922 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.642566 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.642998 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.644184 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.644468 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.646080 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.646401 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.646803 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.640489 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.653769 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.653888 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.654082 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.654512 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.654774 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.655234 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.655994 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.659256 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.661928 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.662205 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.662399 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.662515 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.662781 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.662860 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.663202 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.663328 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.663502 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.663775 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.664272 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.664457 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.664663 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.664934 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.665454 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.667371 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.667864 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.670082 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.670505 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.670514 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.670728 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.671065 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.671311 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.671364 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.671395 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.672006 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.672376 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). 
InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.672401 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.676701 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.678144 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.680254 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.680743 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.680768 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.689361 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.690090 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.699238 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.699700 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.702666 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.703808 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.704491 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.705634 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.707089 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.707728 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.709376 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.715161 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.720400 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.725164 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.728070 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.734989 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.750812 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754302 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754359 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzs5j\" (UniqueName: \"kubernetes.io/projected/ae67d599-f070-4cc9-a934-d546e1d84e7d-kube-api-access-wzs5j\") pod \"node-resolver-8cfq9\" (UID: \"ae67d599-f070-4cc9-a934-d546e1d84e7d\") " pod="openshift-dns/node-resolver-8cfq9" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754385 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754407 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/ae67d599-f070-4cc9-a934-d546e1d84e7d-hosts-file\") pod \"node-resolver-8cfq9\" (UID: \"ae67d599-f070-4cc9-a934-d546e1d84e7d\") " pod="openshift-dns/node-resolver-8cfq9" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754460 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754470 4718 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754479 4718 reconciler_common.go:293] "Volume detached for volume 
\"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754489 4718 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754499 4718 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754508 4718 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754517 4718 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754529 4718 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754537 4718 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754545 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754553 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754569 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754577 4718 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754586 4718 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754603 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754611 4718 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" 
(UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754619 4718 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754627 4718 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754636 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754644 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754653 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754661 4718 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754670 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754678 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754688 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754696 4718 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754704 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754712 4718 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754721 4718 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754728 4718 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754736 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754745 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754754 4718 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754761 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754771 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754788 4718 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754785 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754800 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754841 4718 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754855 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754867 4718 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754881 4718 
reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754893 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754912 4718 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754926 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754938 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754952 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.754992 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755005 4718 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755013 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755022 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755031 4718 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755038 4718 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755047 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath 
\"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755055 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755064 4718 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755071 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755079 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755088 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755101 4718 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755111 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755119 4718 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755127 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755135 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755142 4718 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755160 4718 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755176 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755178 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755200 4718 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755210 4718 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755218 4718 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755227 4718 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755236 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755243 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755251 4718 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755261 4718 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755270 4718 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755278 4718 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755285 4718 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755293 4718 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755301 4718 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755310 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755319 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755328 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755337 4718 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755345 4718 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755353 4718 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755361 4718 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755369 4718 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755377 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755386 4718 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755395 4718 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755404 4718 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: 
\"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755412 4718 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755421 4718 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755429 4718 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755437 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755445 4718 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755454 4718 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755461 4718 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755470 4718 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755478 4718 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755486 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755494 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755503 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755512 4718 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" 
DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755521 4718 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755521 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/ae67d599-f070-4cc9-a934-d546e1d84e7d-hosts-file\") pod \"node-resolver-8cfq9\" (UID: \"ae67d599-f070-4cc9-a934-d546e1d84e7d\") " pod="openshift-dns/node-resolver-8cfq9" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755530 4718 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755562 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755576 4718 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755589 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755601 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755613 4718 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755625 4718 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755636 4718 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755647 4718 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755659 4718 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755671 4718 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755683 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755695 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755707 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755720 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755732 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755745 4718 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755758 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755770 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755781 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755793 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755805 4718 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755816 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755827 4718 
reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755839 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755850 4718 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755862 4718 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755873 4718 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755883 4718 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755894 4718 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755904 4718 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755918 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755929 4718 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755942 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755954 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755985 4718 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.755998 4718 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" 
(UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.756009 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.756019 4718 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.756031 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.756042 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.756053 4718 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.756065 4718 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.756076 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.756089 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.756103 4718 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.770439 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.771721 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.771794 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.775603 4718 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b" exitCode=255 Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.776160 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b"} Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.776313 4718 scope.go:117] "RemoveContainer" containerID="6f2c769ba6c0e50e87f46ec567abcacdaa958e75f7c76edab8a19c4d67fb80c9" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.782199 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzs5j\" (UniqueName: \"kubernetes.io/projected/ae67d599-f070-4cc9-a934-d546e1d84e7d-kube-api-access-wzs5j\") pod \"node-resolver-8cfq9\" (UID: \"ae67d599-f070-4cc9-a934-d546e1d84e7d\") " pod="openshift-dns/node-resolver-8cfq9" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.786517 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.790214 4718 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"etcd-crc\" already exists" pod="openshift-etcd/etcd-crc" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.810887 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.836048 4718 scope.go:117] "RemoveContainer" containerID="5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b" Nov 24 08:35:48 crc kubenswrapper[4718]: E1124 08:35:48.836359 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.836661 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.843413 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.853419 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.860731 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.863715 4718 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.867599 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.869754 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-8cfq9" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.875174 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.891491 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.922712 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.950129 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.969445 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.980408 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:48 crc kubenswrapper[4718]: I1124 08:35:48.991440 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 
24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.004412 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-zvlvh"] Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.004922 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.007582 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.009017 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.009659 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.009666 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.010209 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.016932 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a56
46fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState
\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.036609 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.055489 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.058998 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/811ba3ee-aad5-427c-84f7-fbd3b78255ec-multus-daemon-config\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.059042 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-os-release\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.059066 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-multus-cni-dir\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.059112 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-hostroot\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.059136 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-var-lib-kubelet\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.059152 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/811ba3ee-aad5-427c-84f7-fbd3b78255ec-cni-binary-copy\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.059165 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-multus-socket-dir-parent\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 
08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.059196 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-var-lib-cni-bin\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.059213 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-var-lib-cni-multus\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.059228 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-etc-kubernetes\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.059251 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-cnibin\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.059266 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-run-netns\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.059280 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-multus-conf-dir\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.059293 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-run-multus-certs\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.059310 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qf62t\" (UniqueName: \"kubernetes.io/projected/811ba3ee-aad5-427c-84f7-fbd3b78255ec-kube-api-access-qf62t\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.059324 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-system-cni-dir\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.059338 4718 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-run-k8s-cni-cncf-io\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.072596 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.082499 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.093179 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2c769ba6c0e50e87f46ec567abcacdaa958e75f7c76edab8a19c4d67fb80c9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"message\\\":\\\"W1124 08:35:31.786752 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 
08:35:31.787432 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763973331 cert, and key in /tmp/serving-cert-3431097427/serving-signer.crt, /tmp/serving-cert-3431097427/serving-signer.key\\\\nI1124 08:35:31.933875 1 observer_polling.go:159] Starting file observer\\\\nW1124 08:35:31.937511 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 08:35:31.937667 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:31.938964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3431097427/tls.crt::/tmp/serving-cert-3431097427/tls.key\\\\\\\"\\\\nF1124 08:35:32.158060 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.103031 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.112347 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.121516 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.143176 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.160879 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161017 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-multus-cni-dir\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161043 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-hostroot\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161070 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" 
Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161097 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161120 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-var-lib-kubelet\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161144 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-multus-socket-dir-parent\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161165 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-var-lib-cni-bin\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161187 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-var-lib-cni-multus\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161209 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-etc-kubernetes\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161239 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/811ba3ee-aad5-427c-84f7-fbd3b78255ec-cni-binary-copy\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161264 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161284 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-run-netns\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 
08:35:49.161305 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-multus-conf-dir\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161326 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-run-multus-certs\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161347 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qf62t\" (UniqueName: \"kubernetes.io/projected/811ba3ee-aad5-427c-84f7-fbd3b78255ec-kube-api-access-qf62t\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161369 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-cnibin\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161391 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-system-cni-dir\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161415 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-run-k8s-cni-cncf-io\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161440 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161467 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-os-release\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.161489 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/811ba3ee-aad5-427c-84f7-fbd3b78255ec-multus-daemon-config\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.162072 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: 
\"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-run-k8s-cni-cncf-io\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.162175 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-cnibin\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.162231 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-system-cni-dir\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.162258 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-run-netns\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.162288 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/811ba3ee-aad5-427c-84f7-fbd3b78255ec-cni-binary-copy\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: E1124 08:35:49.162322 4718 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.162334 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/811ba3ee-aad5-427c-84f7-fbd3b78255ec-multus-daemon-config\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.162354 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-run-multus-certs\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: E1124 08:35:49.162383 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 08:35:50.162352735 +0000 UTC m=+22.278643639 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.162338 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-multus-conf-dir\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: E1124 08:35:49.162401 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:35:50.162394287 +0000 UTC m=+22.278685191 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.162422 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-multus-cni-dir\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.162435 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-var-lib-kubelet\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.162459 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-multus-socket-dir-parent\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: E1124 08:35:49.162464 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.162473 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-hostroot\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.162483 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-var-lib-cni-bin\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " 
pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: E1124 08:35:49.162496 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.162507 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-host-var-lib-cni-multus\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: E1124 08:35:49.162511 4718 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.162538 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-etc-kubernetes\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: E1124 08:35:49.162568 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 08:35:49 crc kubenswrapper[4718]: E1124 08:35:49.162583 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 08:35:50.162569381 +0000 UTC m=+22.278860285 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:49 crc kubenswrapper[4718]: E1124 08:35:49.162587 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.162595 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/811ba3ee-aad5-427c-84f7-fbd3b78255ec-os-release\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: E1124 08:35:49.162603 4718 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:49 crc kubenswrapper[4718]: E1124 08:35:49.162630 4718 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 08:35:49 crc kubenswrapper[4718]: E1124 08:35:49.162652 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 08:35:50.162634173 +0000 UTC m=+22.278925157 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:49 crc kubenswrapper[4718]: E1124 08:35:49.162677 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 08:35:50.162662953 +0000 UTC m=+22.278953917 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.165884 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.174692 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 
24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.184224 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qf62t\" (UniqueName: \"kubernetes.io/projected/811ba3ee-aad5-427c-84f7-fbd3b78255ec-kube-api-access-qf62t\") pod \"multus-zvlvh\" (UID: \"811ba3ee-aad5-427c-84f7-fbd3b78255ec\") " pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.185016 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\
\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.200700 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2c769ba6c0e50e87f46ec567abcacdaa958e75f7c76edab8a19c4d67fb80c9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"message\\\":\\\"W1124 08:35:31.786752 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 08:35:31.787432 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763973331 cert, and key in /tmp/serving-cert-3431097427/serving-signer.crt, /tmp/serving-cert-3431097427/serving-signer.key\\\\nI1124 08:35:31.933875 1 observer_polling.go:159] Starting file observer\\\\nW1124 08:35:31.937511 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 08:35:31.937667 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:31.938964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3431097427/tls.crt::/tmp/serving-cert-3431097427/tls.key\\\\\\\"\\\\nF1124 08:35:32.158060 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 
08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.217660 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.227494 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.247416 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true
,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b
1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.262502 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.274489 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.321859 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-zvlvh" Nov 24 08:35:49 crc kubenswrapper[4718]: W1124 08:35:49.341616 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod811ba3ee_aad5_427c_84f7_fbd3b78255ec.slice/crio-e64870531b1124b3d4da635097ea62a0e7815a97ef0f3d28fa40344e826247a8 WatchSource:0}: Error finding container e64870531b1124b3d4da635097ea62a0e7815a97ef0f3d28fa40344e826247a8: Status 404 returned error can't find the container with id e64870531b1124b3d4da635097ea62a0e7815a97ef0f3d28fa40344e826247a8 Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.359904 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-575gl"] Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.360362 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.361997 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.362489 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-qbwmc"] Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.363192 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.363860 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.364039 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.364272 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.364471 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.364734 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.365829 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.382419 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a
67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.393489 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers 
with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.406918 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2c769ba6c0e50e87f46ec567abcacdaa958e75f7c76edab8a19c4d67fb80c9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"message\\\":\\\"W1124 08:35:31.786752 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 
08:35:31.787432 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763973331 cert, and key in /tmp/serving-cert-3431097427/serving-signer.crt, /tmp/serving-cert-3431097427/serving-signer.key\\\\nI1124 08:35:31.933875 1 observer_polling.go:159] Starting file observer\\\\nW1124 08:35:31.937511 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 08:35:31.937667 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:31.938964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3431097427/tls.crt::/tmp/serving-cert-3431097427/tls.key\\\\\\\"\\\\nF1124 08:35:32.158060 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.415952 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.426281 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.435696 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.445669 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.456953 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.466288 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.466713 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/f25c7267-0621-49ab-91e3-08d7d85c815d-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.466749 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f25c7267-0621-49ab-91e3-08d7d85c815d-cni-binary-copy\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.466777 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f25c7267-0621-49ab-91e3-08d7d85c815d-cnibin\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.466796 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/89887d07-87db-4f4f-a6fa-3cd34e814131-proxy-tls\") pod \"machine-config-daemon-575gl\" (UID: \"89887d07-87db-4f4f-a6fa-3cd34e814131\") " pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.466840 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f25c7267-0621-49ab-91e3-08d7d85c815d-os-release\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.466858 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f25c7267-0621-49ab-91e3-08d7d85c815d-system-cni-dir\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " 
pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.466912 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f25c7267-0621-49ab-91e3-08d7d85c815d-tuning-conf-dir\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.466937 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdxxl\" (UniqueName: \"kubernetes.io/projected/f25c7267-0621-49ab-91e3-08d7d85c815d-kube-api-access-pdxxl\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.466950 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/89887d07-87db-4f4f-a6fa-3cd34e814131-rootfs\") pod \"machine-config-daemon-575gl\" (UID: \"89887d07-87db-4f4f-a6fa-3cd34e814131\") " pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.466998 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/89887d07-87db-4f4f-a6fa-3cd34e814131-mcd-auth-proxy-config\") pod \"machine-config-daemon-575gl\" (UID: \"89887d07-87db-4f4f-a6fa-3cd34e814131\") " pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.467043 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhqzp\" (UniqueName: \"kubernetes.io/projected/89887d07-87db-4f4f-a6fa-3cd34e814131-kube-api-access-rhqzp\") pod \"machine-config-daemon-575gl\" (UID: \"89887d07-87db-4f4f-a6fa-3cd34e814131\") " pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.473850 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.484709 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.497206 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.505894 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.512894 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.522798 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.538726 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.549267 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.560676 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.567740 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/89887d07-87db-4f4f-a6fa-3cd34e814131-mcd-auth-proxy-config\") pod \"machine-config-daemon-575gl\" (UID: \"89887d07-87db-4f4f-a6fa-3cd34e814131\") " pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.567789 4718 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-rhqzp\" (UniqueName: \"kubernetes.io/projected/89887d07-87db-4f4f-a6fa-3cd34e814131-kube-api-access-rhqzp\") pod \"machine-config-daemon-575gl\" (UID: \"89887d07-87db-4f4f-a6fa-3cd34e814131\") " pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.567825 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/f25c7267-0621-49ab-91e3-08d7d85c815d-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.567852 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f25c7267-0621-49ab-91e3-08d7d85c815d-cni-binary-copy\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.567889 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f25c7267-0621-49ab-91e3-08d7d85c815d-cnibin\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.567926 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/89887d07-87db-4f4f-a6fa-3cd34e814131-proxy-tls\") pod \"machine-config-daemon-575gl\" (UID: \"89887d07-87db-4f4f-a6fa-3cd34e814131\") " pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.567952 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f25c7267-0621-49ab-91e3-08d7d85c815d-os-release\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.567997 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f25c7267-0621-49ab-91e3-08d7d85c815d-system-cni-dir\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.568023 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f25c7267-0621-49ab-91e3-08d7d85c815d-tuning-conf-dir\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.568058 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/89887d07-87db-4f4f-a6fa-3cd34e814131-rootfs\") pod \"machine-config-daemon-575gl\" (UID: \"89887d07-87db-4f4f-a6fa-3cd34e814131\") " pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 
08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.568084 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdxxl\" (UniqueName: \"kubernetes.io/projected/f25c7267-0621-49ab-91e3-08d7d85c815d-kube-api-access-pdxxl\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.568536 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f25c7267-0621-49ab-91e3-08d7d85c815d-system-cni-dir\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.568577 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f25c7267-0621-49ab-91e3-08d7d85c815d-os-release\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.568593 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f25c7267-0621-49ab-91e3-08d7d85c815d-tuning-conf-dir\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.568619 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/89887d07-87db-4f4f-a6fa-3cd34e814131-rootfs\") pod \"machine-config-daemon-575gl\" (UID: \"89887d07-87db-4f4f-a6fa-3cd34e814131\") " pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.568663 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f25c7267-0621-49ab-91e3-08d7d85c815d-cnibin\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.568676 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/f25c7267-0621-49ab-91e3-08d7d85c815d-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.569167 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/89887d07-87db-4f4f-a6fa-3cd34e814131-mcd-auth-proxy-config\") pod \"machine-config-daemon-575gl\" (UID: \"89887d07-87db-4f4f-a6fa-3cd34e814131\") " pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.569490 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f25c7267-0621-49ab-91e3-08d7d85c815d-cni-binary-copy\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: 
\"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.572335 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/89887d07-87db-4f4f-a6fa-3cd34e814131-proxy-tls\") pod \"machine-config-daemon-575gl\" (UID: \"89887d07-87db-4f4f-a6fa-3cd34e814131\") " pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.572288 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82
e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2c769ba6c0e50e87f46ec567abcacdaa958e75f7c76edab8a19c4d67fb80c9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"message\\\":\\\"W1124 08:35:31.786752 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 08:35:31.787432 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763973331 cert, and key in /tmp/serving-cert-3431097427/serving-signer.crt, /tmp/serving-cert-3431097427/serving-signer.key\\\\nI1124 08:35:31.933875 1 observer_polling.go:159] Starting file observer\\\\nW1124 08:35:31.937511 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 08:35:31.937667 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:31.938964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3431097427/tls.crt::/tmp/serving-cert-3431097427/tls.key\\\\\\\"\\\\nF1124 08:35:32.158060 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 
dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\
\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.584423 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdxxl\" (UniqueName: \"kubernetes.io/projected/f25c7267-0621-49ab-91e3-08d7d85c815d-kube-api-access-pdxxl\") pod \"multus-additional-cni-plugins-qbwmc\" (UID: \"f25c7267-0621-49ab-91e3-08d7d85c815d\") " pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.587249 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhqzp\" (UniqueName: \"kubernetes.io/projected/89887d07-87db-4f4f-a6fa-3cd34e814131-kube-api-access-rhqzp\") pod \"machine-config-daemon-575gl\" (UID: \"89887d07-87db-4f4f-a6fa-3cd34e814131\") " pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.618367 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.659698 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.678100 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.684838 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" Nov 24 08:35:49 crc kubenswrapper[4718]: W1124 08:35:49.688484 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod89887d07_87db_4f4f_a6fa_3cd34e814131.slice/crio-1cff8015809a8529d24fec94f872b429a85e7aaa37eeffd4c1de252f6ce2d3e7 WatchSource:0}: Error finding container 1cff8015809a8529d24fec94f872b429a85e7aaa37eeffd4c1de252f6ce2d3e7: Status 404 returned error can't find the container with id 1cff8015809a8529d24fec94f872b429a85e7aaa37eeffd4c1de252f6ce2d3e7 Nov 24 08:35:49 crc kubenswrapper[4718]: W1124 08:35:49.700052 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf25c7267_0621_49ab_91e3_08d7d85c815d.slice/crio-5e9357e1e65c6f3156efbf3c3e5e33f4b0cecd8ac8577e356585cdef5504525e WatchSource:0}: Error finding container 5e9357e1e65c6f3156efbf3c3e5e33f4b0cecd8ac8577e356585cdef5504525e: Status 404 returned error can't find the container with id 5e9357e1e65c6f3156efbf3c3e5e33f4b0cecd8ac8577e356585cdef5504525e Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.700191 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.716591 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2lk4b"] Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.717561 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.727769 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.733122 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.739908 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.752439 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.770589 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-var-lib-openvswitch\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.770640 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovn-node-metrics-cert\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.770668 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-kubelet\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.770694 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-etc-openvswitch\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.770717 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-node-log\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.770739 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovnkube-script-lib\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") 
" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.770761 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-slash\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.770780 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-openvswitch\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.770798 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-cni-netd\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.770819 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-log-socket\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.770855 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-run-netns\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.770880 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-run-ovn-kubernetes\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.770901 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovnkube-config\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.770935 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-systemd-units\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.770963 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.771056 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-env-overrides\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.771126 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-systemd\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.771156 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tq976\" (UniqueName: \"kubernetes.io/projected/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-kube-api-access-tq976\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.771182 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-ovn\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.771206 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-cni-bin\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.772718 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.778734 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"e4a83dfa9c8de22063f0403f745223dbbda2c9225c343c27864d3dec3591bbf4"} Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.780318 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.782816 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" event={"ID":"f25c7267-0621-49ab-91e3-08d7d85c815d","Type":"ContainerStarted","Data":"5e9357e1e65c6f3156efbf3c3e5e33f4b0cecd8ac8577e356585cdef5504525e"} Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.783327 4718 scope.go:117] "RemoveContainer" containerID="5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b" Nov 24 08:35:49 crc kubenswrapper[4718]: E1124 
08:35:49.783482 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.783762 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerStarted","Data":"1cff8015809a8529d24fec94f872b429a85e7aaa37eeffd4c1de252f6ce2d3e7"} Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.785422 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178"} Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.785458 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02"} Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.785473 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5d419206a1a8a98052b931f2ce4c317c085ad0fe3c8d8f918c2ff7c103e9ebf1"} Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.786369 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zvlvh" event={"ID":"811ba3ee-aad5-427c-84f7-fbd3b78255ec","Type":"ContainerStarted","Data":"e64870531b1124b3d4da635097ea62a0e7815a97ef0f3d28fa40344e826247a8"} Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.787639 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5"} Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.787671 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ec9e142e203d91572d76b60b07fb4e20f6b2149e6a4a890f5ade77979c8e25fd"} Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.791613 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-8cfq9" event={"ID":"ae67d599-f070-4cc9-a934-d546e1d84e7d","Type":"ContainerStarted","Data":"eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac"} Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.792582 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-8cfq9" event={"ID":"ae67d599-f070-4cc9-a934-d546e1d84e7d","Type":"ContainerStarted","Data":"7ecd27ce009e2ef48f220abfbee89ec1c6e76ba76e6baef8c112a64ff6a58339"} Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.793176 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Nov 24 
08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.812494 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.832608 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.852994 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872288 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-cni-netd\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872332 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-slash\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872350 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-openvswitch\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872374 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-log-socket\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872402 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-run-netns\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872416 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovnkube-config\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872431 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-run-ovn-kubernetes\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872454 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-systemd-units\") pod \"ovnkube-node-2lk4b\" (UID: 
\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872469 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872493 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-env-overrides\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872510 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-systemd\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872525 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tq976\" (UniqueName: \"kubernetes.io/projected/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-kube-api-access-tq976\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872588 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-ovn\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872607 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-cni-bin\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872632 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-var-lib-openvswitch\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872646 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovn-node-metrics-cert\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872669 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-kubelet\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") 
" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872711 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-etc-openvswitch\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872726 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-node-log\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.872740 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovnkube-script-lib\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.873412 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovnkube-script-lib\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.873504 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-systemd\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.873706 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.873741 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-slash\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.873942 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-openvswitch\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.874072 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-cni-netd\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.874078 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-var-lib-openvswitch\") pod \"ovnkube-node-2lk4b\" (UID: 
\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.874114 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-cni-bin\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.874130 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-run-ovn-kubernetes\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.874134 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-ovn\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.874208 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-kubelet\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.874165 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.874206 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-etc-openvswitch\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.874165 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-run-netns\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.874239 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-node-log\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.874242 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-systemd-units\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: 
I1124 08:35:49.874281 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-log-socket\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.874605 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovnkube-config\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.876715 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovn-node-metrics-cert\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.884030 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-env-overrides\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.898557 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.946500 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tq976\" (UniqueName: \"kubernetes.io/projected/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-kube-api-access-tq976\") pod \"ovnkube-node-2lk4b\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:49 crc kubenswrapper[4718]: I1124 08:35:49.959589 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f2c769ba6c0e50e87f46ec567abcacdaa958e75f7c76edab8a19c4d67fb80c9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"message\\\":\\\"W1124 08:35:31.786752 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 
08:35:31.787432 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763973331 cert, and key in /tmp/serving-cert-3431097427/serving-signer.crt, /tmp/serving-cert-3431097427/serving-signer.key\\\\nI1124 08:35:31.933875 1 observer_polling.go:159] Starting file observer\\\\nW1124 08:35:31.937511 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 08:35:31.937667 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:31.938964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3431097427/tls.crt::/tmp/serving-cert-3431097427/tls.key\\\\\\\"\\\\nF1124 08:35:32.158060 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.010476 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.031473 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.052267 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77325745326
5a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",
\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"h
ostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.077824 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:50 crc kubenswrapper[4718]: W1124 08:35:50.095922 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod692d15f5_2875_47c6_92e3_3c99bfd6b7ea.slice/crio-9bf2ed57b6656aba8f69321f2f6b77c74f953e9d49a2a7dd16a4a3af519e8156 WatchSource:0}: Error finding container 9bf2ed57b6656aba8f69321f2f6b77c74f953e9d49a2a7dd16a4a3af519e8156: Status 404 returned error can't find the container with id 9bf2ed57b6656aba8f69321f2f6b77c74f953e9d49a2a7dd16a4a3af519e8156 Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.118195 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.159041 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.175130 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:35:50 crc kubenswrapper[4718]: E1124 08:35:50.175228 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:35:52.175207044 +0000 UTC m=+24.291497948 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.175265 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.175306 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.175338 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.175370 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" 
(UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:35:50 crc kubenswrapper[4718]: E1124 08:35:50.175432 4718 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 08:35:50 crc kubenswrapper[4718]: E1124 08:35:50.175453 4718 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 08:35:50 crc kubenswrapper[4718]: E1124 08:35:50.175479 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 08:35:52.175468921 +0000 UTC m=+24.291759825 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 08:35:50 crc kubenswrapper[4718]: E1124 08:35:50.175495 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 08:35:52.175488071 +0000 UTC m=+24.291778975 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 08:35:50 crc kubenswrapper[4718]: E1124 08:35:50.175529 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 08:35:50 crc kubenswrapper[4718]: E1124 08:35:50.175569 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 08:35:50 crc kubenswrapper[4718]: E1124 08:35:50.175583 4718 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:50 crc kubenswrapper[4718]: E1124 08:35:50.175649 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 08:35:52.175625225 +0000 UTC m=+24.291916189 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:50 crc kubenswrapper[4718]: E1124 08:35:50.175537 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 08:35:50 crc kubenswrapper[4718]: E1124 08:35:50.175686 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 08:35:50 crc kubenswrapper[4718]: E1124 08:35:50.175696 4718 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:50 crc kubenswrapper[4718]: E1124 08:35:50.175726 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 08:35:52.175717567 +0000 UTC m=+24.292008561 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.199110 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.239874 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.278452 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 
24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.320533 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"st
artTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.359596 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.403426 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:50Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.447835 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\
"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24
T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:50Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.489421 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:50Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.524962 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:50Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.564583 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:50Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.595438 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.595627 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:35:50 crc kubenswrapper[4718]: E1124 08:35:50.595686 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:35:50 crc kubenswrapper[4718]: E1124 08:35:50.595794 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.595501 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:35:50 crc kubenswrapper[4718]: E1124 08:35:50.595876 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.599356 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.600193 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.601359 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.602034 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.603194 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.603777 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.604494 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.605467 4718 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.606158 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.607016 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:50Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.607305 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.608005 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.609255 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.609926 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.610517 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.611750 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.612418 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.613629 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.614267 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.615151 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.616318 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.616884 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.617888 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.618385 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.619415 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.619882 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.620556 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" 
path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.621751 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.622286 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.623252 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.623729 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.624654 4718 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.624753 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.626440 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.626948 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.627780 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.629468 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.630378 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.630959 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.632139 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.633066 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.633597 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.634795 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.635451 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.636619 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.637170 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.638061 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.641683 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:50Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.681592 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:50Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.726871 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:50Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:50 crc kubenswrapper[4718]: 
I1124 08:35:50.761240 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:50Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.794977 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zvlvh" event={"ID":"811ba3ee-aad5-427c-84f7-fbd3b78255ec","Type":"ContainerStarted","Data":"1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342"} Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.796617 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerStarted","Data":"b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a"} Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.796658 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerStarted","Data":"c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67"} Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.797734 4718 generic.go:334] "Generic (PLEG): container finished" podID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" 
containerID="5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632" exitCode=0 Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.797801 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerDied","Data":"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632"} Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.797822 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerStarted","Data":"9bf2ed57b6656aba8f69321f2f6b77c74f953e9d49a2a7dd16a4a3af519e8156"} Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.799008 4718 generic.go:334] "Generic (PLEG): container finished" podID="f25c7267-0621-49ab-91e3-08d7d85c815d" containerID="ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd" exitCode=0 Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.799079 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" event={"ID":"f25c7267-0621-49ab-91e3-08d7d85c815d","Type":"ContainerDied","Data":"ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd"} Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.799712 4718 scope.go:117] "RemoveContainer" containerID="5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b" Nov 24 08:35:50 crc kubenswrapper[4718]: E1124 08:35:50.799862 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.806023 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:50Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.814490 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-2zxtm"] Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.814853 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-2zxtm" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.832449 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.852858 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.872504 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.883146 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xzpp\" (UniqueName: \"kubernetes.io/projected/41d62746-e30a-4e15-a353-c2b4800bdae1-kube-api-access-2xzpp\") pod \"node-ca-2zxtm\" (UID: \"41d62746-e30a-4e15-a353-c2b4800bdae1\") " pod="openshift-image-registry/node-ca-2zxtm" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.883216 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/41d62746-e30a-4e15-a353-c2b4800bdae1-serviceca\") pod \"node-ca-2zxtm\" (UID: \"41d62746-e30a-4e15-a353-c2b4800bdae1\") " pod="openshift-image-registry/node-ca-2zxtm" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.883247 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/41d62746-e30a-4e15-a353-c2b4800bdae1-host\") pod \"node-ca-2zxtm\" (UID: \"41d62746-e30a-4e15-a353-c2b4800bdae1\") " pod="openshift-image-registry/node-ca-2zxtm" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.892588 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.920674 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:50Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.960809 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:50Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.984516 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xzpp\" (UniqueName: \"kubernetes.io/projected/41d62746-e30a-4e15-a353-c2b4800bdae1-kube-api-access-2xzpp\") pod \"node-ca-2zxtm\" (UID: \"41d62746-e30a-4e15-a353-c2b4800bdae1\") " pod="openshift-image-registry/node-ca-2zxtm" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.984569 4718 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/41d62746-e30a-4e15-a353-c2b4800bdae1-serviceca\") pod \"node-ca-2zxtm\" (UID: \"41d62746-e30a-4e15-a353-c2b4800bdae1\") " pod="openshift-image-registry/node-ca-2zxtm" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.984595 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/41d62746-e30a-4e15-a353-c2b4800bdae1-host\") pod \"node-ca-2zxtm\" (UID: \"41d62746-e30a-4e15-a353-c2b4800bdae1\") " pod="openshift-image-registry/node-ca-2zxtm" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.984654 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/41d62746-e30a-4e15-a353-c2b4800bdae1-host\") pod \"node-ca-2zxtm\" (UID: \"41d62746-e30a-4e15-a353-c2b4800bdae1\") " pod="openshift-image-registry/node-ca-2zxtm" Nov 24 08:35:50 crc kubenswrapper[4718]: I1124 08:35:50.985555 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/41d62746-e30a-4e15-a353-c2b4800bdae1-serviceca\") pod \"node-ca-2zxtm\" (UID: \"41d62746-e30a-4e15-a353-c2b4800bdae1\") " pod="openshift-image-registry/node-ca-2zxtm" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.001725 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.035419 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xzpp\" (UniqueName: \"kubernetes.io/projected/41d62746-e30a-4e15-a353-c2b4800bdae1-kube-api-access-2xzpp\") pod \"node-ca-2zxtm\" (UID: \"41d62746-e30a-4e15-a353-c2b4800bdae1\") " pod="openshift-image-registry/node-ca-2zxtm" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.059175 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.102797 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.144471 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.150514 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-2zxtm" Nov 24 08:35:51 crc kubenswrapper[4718]: W1124 08:35:51.163362 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41d62746_e30a_4e15_a353_c2b4800bdae1.slice/crio-8cd9f660d5c1d97a15335940c8b39d1ddc915147122bee5c6d25e600ec00ddbc WatchSource:0}: Error finding container 8cd9f660d5c1d97a15335940c8b39d1ddc915147122bee5c6d25e600ec00ddbc: Status 404 returned error can't find the container with id 8cd9f660d5c1d97a15335940c8b39d1ddc915147122bee5c6d25e600ec00ddbc Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.180676 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac11
7eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.222795 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.272791 4718 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.298969 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.343333 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.384679 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.424838 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.476184 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.508947 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.544880 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.585976 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.630395 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containe
rID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dc
cfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.663752 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.706508 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.803709 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8"} Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.805163 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" event={"ID":"f25c7267-0621-49ab-91e3-08d7d85c815d","Type":"ContainerStarted","Data":"88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a"} Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.808055 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerStarted","Data":"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035"} Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.808097 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerStarted","Data":"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa"} Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.808108 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerStarted","Data":"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0"} Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.808116 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerStarted","Data":"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96"} Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.808135 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerStarted","Data":"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d"} Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.808143 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerStarted","Data":"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916"} Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.810345 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-2zxtm" event={"ID":"41d62746-e30a-4e15-a353-c2b4800bdae1","Type":"ContainerStarted","Data":"50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f"} Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.810403 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-2zxtm" event={"ID":"41d62746-e30a-4e15-a353-c2b4800bdae1","Type":"ContainerStarted","Data":"8cd9f660d5c1d97a15335940c8b39d1ddc915147122bee5c6d25e600ec00ddbc"} Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.825354 4718 
status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:
35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.837610 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.852471 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.868339 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.900128 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.945841 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:51 crc kubenswrapper[4718]: I1124 08:35:51.981347 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:51Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.019732 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.061994 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.102988 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.142497 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.183543 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.195914 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:35:52 crc kubenswrapper[4718]: E1124 08:35:52.196121 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:35:56.196101398 +0000 UTC m=+28.312392302 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.196151 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.196197 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.196237 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.196265 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:35:52 crc kubenswrapper[4718]: E1124 08:35:52.196335 4718 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 08:35:52 crc kubenswrapper[4718]: E1124 08:35:52.196380 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 08:35:56.196368055 +0000 UTC m=+28.312658959 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 08:35:52 crc kubenswrapper[4718]: E1124 08:35:52.196469 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 08:35:52 crc kubenswrapper[4718]: E1124 08:35:52.196490 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 08:35:52 crc kubenswrapper[4718]: E1124 08:35:52.196500 4718 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:52 crc kubenswrapper[4718]: E1124 08:35:52.196537 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 08:35:56.196521159 +0000 UTC m=+28.312812063 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:52 crc kubenswrapper[4718]: E1124 08:35:52.196632 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 08:35:52 crc kubenswrapper[4718]: E1124 08:35:52.196660 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 08:35:52 crc kubenswrapper[4718]: E1124 08:35:52.196672 4718 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:52 crc kubenswrapper[4718]: E1124 08:35:52.196725 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 08:35:56.196710114 +0000 UTC m=+28.313001018 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:52 crc kubenswrapper[4718]: E1124 08:35:52.197022 4718 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 08:35:52 crc kubenswrapper[4718]: E1124 08:35:52.197118 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 08:35:56.197107134 +0000 UTC m=+28.313398038 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.220281 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.261975 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.302787 4718 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.344585 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.382448 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.419277 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.461636 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.506870 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.541920 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.582247 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPat
h\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\
\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.595639 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.595697 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.595646 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:35:52 crc kubenswrapper[4718]: E1124 08:35:52.595744 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:35:52 crc kubenswrapper[4718]: E1124 08:35:52.595844 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:35:52 crc kubenswrapper[4718]: E1124 08:35:52.595899 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.620918 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\"
:{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.661063 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.701104 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.750919 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z 
is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.778834 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.814997 4718 generic.go:334] "Generic (PLEG): container finished" podID="f25c7267-0621-49ab-91e3-08d7d85c815d" containerID="88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a" exitCode=0 Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.815070 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" event={"ID":"f25c7267-0621-49ab-91e3-08d7d85c815d","Type":"ContainerDied","Data":"88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a"} Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.822835 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.861230 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.900894 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.941434 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:52 crc kubenswrapper[4718]: I1124 08:35:52.983398 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:52Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.021433 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.061930 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.107912 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.144426 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.186433 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiv
eReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.241395 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.271148 4718 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.301188 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.344521 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\
"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 
cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.383156 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.423158 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.462185 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.500534 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.821922 4718 generic.go:334] "Generic (PLEG): container finished" podID="f25c7267-0621-49ab-91e3-08d7d85c815d" containerID="45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c" exitCode=0 Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.821993 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" event={"ID":"f25c7267-0621-49ab-91e3-08d7d85c815d","Type":"ContainerDied","Data":"45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c"} Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.846388 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.862631 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.883934 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoin
t\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.894885 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.909244 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.951886 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.970405 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:53Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.978051 4718 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.980176 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.980234 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.980245 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.980377 4718 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.999126 4718 kubelet_node_status.go:115] "Node was previously registered" node="crc" Nov 24 08:35:53 crc kubenswrapper[4718]: I1124 08:35:53.999458 4718 kubelet_node_status.go:79] "Successfully registered node" node="crc" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.000782 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.000817 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.000828 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.000844 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.000857 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:54Z","lastTransitionTime":"2025-11-24T08:35:54Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.011039 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265
a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\
\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"ho
stIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.026313 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: E1124 08:35:54.026453 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.029672 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.029701 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.029714 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.029730 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.029741 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:54Z","lastTransitionTime":"2025-11-24T08:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.039685 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: E1124 08:35:54.042917 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.046242 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.046296 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.046308 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.046324 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.046335 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:54Z","lastTransitionTime":"2025-11-24T08:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.052241 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: E1124 08:35:54.057299 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.062671 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.062713 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.062724 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.062750 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.062761 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:54Z","lastTransitionTime":"2025-11-24T08:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.066659 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"o
vnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: E1124 08:35:54.076609 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.080116 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.080142 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.080152 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.080169 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.080180 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:54Z","lastTransitionTime":"2025-11-24T08:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.080241 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: E1124 08:35:54.092924 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: E1124 08:35:54.093111 4718 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.095123 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.095163 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.095175 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.095193 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.095203 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:54Z","lastTransitionTime":"2025-11-24T08:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.099865 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.141363 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.197153 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.197194 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.197211 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.197226 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.197236 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:54Z","lastTransitionTime":"2025-11-24T08:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.298957 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.299009 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.299020 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.299036 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.299048 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:54Z","lastTransitionTime":"2025-11-24T08:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.410676 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.410719 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.410730 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.410747 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.410757 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:54Z","lastTransitionTime":"2025-11-24T08:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.513878 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.514122 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.514133 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.514166 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.514181 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:54Z","lastTransitionTime":"2025-11-24T08:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.598138 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:35:54 crc kubenswrapper[4718]: E1124 08:35:54.598243 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.598564 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:35:54 crc kubenswrapper[4718]: E1124 08:35:54.598615 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.598651 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:35:54 crc kubenswrapper[4718]: E1124 08:35:54.598686 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.616616 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.616643 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.616651 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.616665 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.616674 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:54Z","lastTransitionTime":"2025-11-24T08:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.719280 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.719306 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.719314 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.719326 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.719334 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:54Z","lastTransitionTime":"2025-11-24T08:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.821754 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.821781 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.821790 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.821803 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.821811 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:54Z","lastTransitionTime":"2025-11-24T08:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.826925 4718 generic.go:334] "Generic (PLEG): container finished" podID="f25c7267-0621-49ab-91e3-08d7d85c815d" containerID="2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656" exitCode=0 Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.827013 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" event={"ID":"f25c7267-0621-49ab-91e3-08d7d85c815d","Type":"ContainerDied","Data":"2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656"} Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.836385 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerStarted","Data":"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0"} Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.844718 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.864499 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.878167 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.892084 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.904855 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.915556 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 
crc kubenswrapper[4718]: I1124 08:35:54.924431 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.924458 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.924467 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.924479 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.924488 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:54Z","lastTransitionTime":"2025-11-24T08:35:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.926591 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.946864 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-v
ars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.960360 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.975625 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:54 crc kubenswrapper[4718]: I1124 08:35:54.993577 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:54Z 
is after 2025-08-24T17:21:41Z" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.004773 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:55Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.023229 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:55Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.027626 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.027702 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.027716 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.027742 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.027756 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:55Z","lastTransitionTime":"2025-11-24T08:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.036248 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:55Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.048804 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:55Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.130278 4718 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.130319 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.130328 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.130345 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.130355 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:55Z","lastTransitionTime":"2025-11-24T08:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.233439 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.233511 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.233530 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.233554 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.233575 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:55Z","lastTransitionTime":"2025-11-24T08:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.336580 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.336637 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.336661 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.336686 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.336707 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:55Z","lastTransitionTime":"2025-11-24T08:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.439770 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.439826 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.439842 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.439864 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.439875 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:55Z","lastTransitionTime":"2025-11-24T08:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.542264 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.542325 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.542340 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.542363 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.542377 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:55Z","lastTransitionTime":"2025-11-24T08:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.645341 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.645381 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.645388 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.645403 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.645414 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:55Z","lastTransitionTime":"2025-11-24T08:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.747616 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.747670 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.747682 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.747702 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.747714 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:55Z","lastTransitionTime":"2025-11-24T08:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.845049 4718 generic.go:334] "Generic (PLEG): container finished" podID="f25c7267-0621-49ab-91e3-08d7d85c815d" containerID="2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367" exitCode=0 Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.845102 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" event={"ID":"f25c7267-0621-49ab-91e3-08d7d85c815d","Type":"ContainerDied","Data":"2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367"} Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.850617 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.850683 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.850706 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.850732 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.850745 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:55Z","lastTransitionTime":"2025-11-24T08:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.872516 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:55Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.889329 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:55Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.907171 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:55Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.923465 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:55Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.937025 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:55Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.950346 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:55Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.954575 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.954607 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.954615 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.954629 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.954639 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:55Z","lastTransitionTime":"2025-11-24T08:35:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.971103 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae
96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:55Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.982234 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:55Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:55 crc kubenswrapper[4718]: I1124 08:35:55.999543 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:55Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.013134 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.025702 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.040286 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.052561 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.056575 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.056614 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.056626 4718 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.056641 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.056652 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:56Z","lastTransitionTime":"2025-11-24T08:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.063547 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.079279 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.159109 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.159139 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.159147 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.159161 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.159171 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:56Z","lastTransitionTime":"2025-11-24T08:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.241789 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.241941 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.241970 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.242031 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:35:56 crc kubenswrapper[4718]: E1124 08:35:56.242056 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-11-24 08:36:04.24203108 +0000 UTC m=+36.358321984 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:35:56 crc kubenswrapper[4718]: E1124 08:35:56.242096 4718 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 08:35:56 crc kubenswrapper[4718]: E1124 08:35:56.242114 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 08:35:56 crc kubenswrapper[4718]: E1124 08:35:56.242141 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 08:35:56 crc kubenswrapper[4718]: E1124 08:35:56.242158 4718 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.242099 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:35:56 crc kubenswrapper[4718]: E1124 08:35:56.242144 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 08:36:04.242130033 +0000 UTC m=+36.358420937 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 08:35:56 crc kubenswrapper[4718]: E1124 08:35:56.242233 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 08:36:04.242220095 +0000 UTC m=+36.358510999 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:56 crc kubenswrapper[4718]: E1124 08:35:56.242264 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 08:35:56 crc kubenswrapper[4718]: E1124 08:35:56.242260 4718 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 08:35:56 crc kubenswrapper[4718]: E1124 08:35:56.242364 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 08:36:04.242342898 +0000 UTC m=+36.358633882 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 08:35:56 crc kubenswrapper[4718]: E1124 08:35:56.242280 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 08:35:56 crc kubenswrapper[4718]: E1124 08:35:56.242401 4718 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:56 crc kubenswrapper[4718]: E1124 08:35:56.242448 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 08:36:04.242440141 +0000 UTC m=+36.358731165 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.261462 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.261511 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.261521 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.261539 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.261550 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:56Z","lastTransitionTime":"2025-11-24T08:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.363562 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.363589 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.363597 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.363610 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.363618 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:56Z","lastTransitionTime":"2025-11-24T08:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.474518 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.474585 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.474600 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.474625 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.474640 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:56Z","lastTransitionTime":"2025-11-24T08:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.576918 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.576966 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.577000 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.577016 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.577027 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:56Z","lastTransitionTime":"2025-11-24T08:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.595423 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.595461 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.595473 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:35:56 crc kubenswrapper[4718]: E1124 08:35:56.595573 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:35:56 crc kubenswrapper[4718]: E1124 08:35:56.595715 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:35:56 crc kubenswrapper[4718]: E1124 08:35:56.595857 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.681169 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.681215 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.681229 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.681248 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.681264 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:56Z","lastTransitionTime":"2025-11-24T08:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.783712 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.783767 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.783779 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.783797 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.783808 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:56Z","lastTransitionTime":"2025-11-24T08:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.854992 4718 generic.go:334] "Generic (PLEG): container finished" podID="f25c7267-0621-49ab-91e3-08d7d85c815d" containerID="8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889" exitCode=0 Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.855041 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" event={"ID":"f25c7267-0621-49ab-91e3-08d7d85c815d","Type":"ContainerDied","Data":"8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889"} Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.859591 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerStarted","Data":"d3ce0336db27eef196bedb7ea84158b773317543845001ff415827224da006ff"} Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.859870 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.873201 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\
"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.886327 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.886353 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.886362 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.886376 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.886386 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:56Z","lastTransitionTime":"2025-11-24T08:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.891704 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.904950 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.919308 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.922150 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.932613 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.947478 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.966743 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.980879 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.988629 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.988670 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.988678 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.988691 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.988700 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:56Z","lastTransitionTime":"2025-11-24T08:35:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:56 crc kubenswrapper[4718]: I1124 08:35:56.993762 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.003313 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.021899 4718 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.032748 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.045083 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.056860 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.067350 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.081358 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.091796 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.091826 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.091837 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.091855 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.091869 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:57Z","lastTransitionTime":"2025-11-24T08:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.096633 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.113066 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.128081 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.139316 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.154466 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.170834 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 
2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.185783 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073
f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\
":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.193931 4718 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.193998 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.194009 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.194024 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.194033 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:57Z","lastTransitionTime":"2025-11-24T08:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.207962 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\
":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"conta
inerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.221863 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.235563 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.247216 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.264873 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3ce0336db27eef196bedb7ea84158b773317543
845001ff415827224da006ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.274820 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.290294 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.296956 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.297019 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.297034 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.297054 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.297068 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:57Z","lastTransitionTime":"2025-11-24T08:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.400305 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.400355 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.400375 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.400398 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.400415 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:57Z","lastTransitionTime":"2025-11-24T08:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.504153 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.504206 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.504219 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.504241 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.504254 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:57Z","lastTransitionTime":"2025-11-24T08:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.607659 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.607720 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.607737 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.607757 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.607772 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:57Z","lastTransitionTime":"2025-11-24T08:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.710879 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.710939 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.710951 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.710992 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.711003 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:57Z","lastTransitionTime":"2025-11-24T08:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.813942 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.813996 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.814005 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.814018 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.814028 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:57Z","lastTransitionTime":"2025-11-24T08:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.869662 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" event={"ID":"f25c7267-0621-49ab-91e3-08d7d85c815d","Type":"ContainerStarted","Data":"465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f"} Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.869715 4718 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.870284 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.883300 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T
08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.893593 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.896214 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.911092 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.916341 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.916390 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.916403 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.916425 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.916442 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:57Z","lastTransitionTime":"2025-11-24T08:35:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.932555 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"cont
ainerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3ce0336db27eef196bedb7ea84158b773317543845001ff415827224da006ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkub
e-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.943479 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.957938 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.971906 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.983627 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:57 crc kubenswrapper[4718]: I1124 08:35:57.996255 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:57Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.010425 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.018562 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.018599 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.018612 4718 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.018629 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.018643 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:58Z","lastTransitionTime":"2025-11-24T08:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.021714 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.034594 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.052208 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be
30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensu
re-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.069176 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.082443 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.097376 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.109734 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.121837 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.121904 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.121919 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.121938 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.121950 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:58Z","lastTransitionTime":"2025-11-24T08:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.122685 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.133698 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.145267 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.156142 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.172553 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 
2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.189749 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\
\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.210498 4718 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cd
f4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.224332 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.224388 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.224399 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.224419 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.224433 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:58Z","lastTransitionTime":"2025-11-24T08:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.226181 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.239280 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.250373 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.271259 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3ce0336db27eef196bedb7ea84158b773317543
845001ff415827224da006ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.293724 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.315254 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.327680 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.327735 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.327752 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.327773 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.327788 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:58Z","lastTransitionTime":"2025-11-24T08:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.430604 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.431169 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.431197 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.431218 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.431231 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:58Z","lastTransitionTime":"2025-11-24T08:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.533514 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.533545 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.533559 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.533574 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.533583 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:58Z","lastTransitionTime":"2025-11-24T08:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.596374 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.596423 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.596578 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:35:58 crc kubenswrapper[4718]: E1124 08:35:58.596569 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:35:58 crc kubenswrapper[4718]: E1124 08:35:58.596743 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:35:58 crc kubenswrapper[4718]: E1124 08:35:58.596860 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.609318 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.621387 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.633582 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.635781 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.635816 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.635826 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.635843 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.635855 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:58Z","lastTransitionTime":"2025-11-24T08:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.645682 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.661935 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.677003 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 
2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.695481 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.723593 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.739014 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.739061 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.739075 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.739095 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.739107 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:58Z","lastTransitionTime":"2025-11-24T08:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.739675 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.755280 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.771565 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.795250 4718 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3ce0336db27eef196bedb7ea84158b773317543845001ff415827224da006ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.809596 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.826239 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.841385 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.841423 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.841433 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.841447 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.841456 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:58Z","lastTransitionTime":"2025-11-24T08:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.842876 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.874087 4718 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.944117 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.944170 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.944183 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.944202 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:58 crc kubenswrapper[4718]: I1124 08:35:58.944214 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:58Z","lastTransitionTime":"2025-11-24T08:35:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.047447 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.047510 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.047535 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.047576 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.047597 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:59Z","lastTransitionTime":"2025-11-24T08:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.150697 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.150782 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.150791 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.150806 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.150817 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:59Z","lastTransitionTime":"2025-11-24T08:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.253488 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.253523 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.253533 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.253549 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.253566 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:59Z","lastTransitionTime":"2025-11-24T08:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.356079 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.356116 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.356124 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.356138 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.356150 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:59Z","lastTransitionTime":"2025-11-24T08:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.459342 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.459404 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.459414 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.459433 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.459447 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:59Z","lastTransitionTime":"2025-11-24T08:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.562011 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.562075 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.562093 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.562108 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.562118 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:59Z","lastTransitionTime":"2025-11-24T08:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.664778 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.664813 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.664824 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.664842 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.664854 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:59Z","lastTransitionTime":"2025-11-24T08:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.766959 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.767020 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.767032 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.767048 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.767059 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:59Z","lastTransitionTime":"2025-11-24T08:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.869777 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.869820 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.869831 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.869848 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.869865 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:59Z","lastTransitionTime":"2025-11-24T08:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.878903 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovnkube-controller/0.log" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.881317 4718 generic.go:334] "Generic (PLEG): container finished" podID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerID="d3ce0336db27eef196bedb7ea84158b773317543845001ff415827224da006ff" exitCode=1 Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.881357 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerDied","Data":"d3ce0336db27eef196bedb7ea84158b773317543845001ff415827224da006ff"} Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.882031 4718 scope.go:117] "RemoveContainer" containerID="d3ce0336db27eef196bedb7ea84158b773317543845001ff415827224da006ff" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.898131 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:59Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.911065 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:59Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.922851 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:59Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.941372 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3ce0336db27eef196bedb7ea84158b773317543
845001ff415827224da006ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3ce0336db27eef196bedb7ea84158b773317543845001ff415827224da006ff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:35:59Z\\\",\\\"message\\\":\\\"8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 08:35:59.557861 5994 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1124 08:35:59.557931 5994 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1124 08:35:59.557960 5994 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1124 08:35:59.558009 5994 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1124 08:35:59.558032 5994 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1124 08:35:59.558060 5994 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1124 08:35:59.558087 5994 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1124 08:35:59.558107 5994 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1124 08:35:59.558138 5994 handler.go:208] Removed *v1.Node event handler 2\\\\nI1124 08:35:59.558198 5994 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1124 08:35:59.558216 5994 handler.go:208] Removed *v1.Node event handler 7\\\\nI1124 08:35:59.558258 5994 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1124 08:35:59.558292 5994 factory.go:656] Stopping watch factory\\\\nI1124 08:35:59.558313 5994 ovnkube.go:599] Stopped ovnkube\\\\nI1124 08:35:59.558342 5994 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1124 
08:35:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:59Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.950941 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.16
8.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:59Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.963440 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:59Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.972461 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.972494 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.972501 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.972517 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.972527 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:35:59Z","lastTransitionTime":"2025-11-24T08:35:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.977967 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:59Z is after 2025-08-24T17:21:41Z" Nov 24 08:35:59 crc kubenswrapper[4718]: I1124 08:35:59.996290 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:35:59Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.012331 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.025908 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.038842 4718 
status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.052150 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.065360 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.075143 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.075182 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.075192 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.075207 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.075220 4718 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:00Z","lastTransitionTime":"2025-11-24T08:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.078347 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.097092 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"re
ady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\
\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.177201 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.177258 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.177269 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.177289 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.177300 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:00Z","lastTransitionTime":"2025-11-24T08:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.279507 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.279546 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.279557 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.279570 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.279580 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:00Z","lastTransitionTime":"2025-11-24T08:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.382256 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.382304 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.382318 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.382344 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.382358 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:00Z","lastTransitionTime":"2025-11-24T08:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.484643 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.484686 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.484697 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.484713 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.484723 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:00Z","lastTransitionTime":"2025-11-24T08:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.587836 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.587893 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.587905 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.587921 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.587932 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:00Z","lastTransitionTime":"2025-11-24T08:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.595593 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.595803 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:00 crc kubenswrapper[4718]: E1124 08:36:00.595953 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.595989 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:00 crc kubenswrapper[4718]: E1124 08:36:00.596229 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:00 crc kubenswrapper[4718]: E1124 08:36:00.596118 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.691358 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.691425 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.691443 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.691471 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.691493 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:00Z","lastTransitionTime":"2025-11-24T08:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.794695 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.794739 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.794752 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.794771 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.794781 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:00Z","lastTransitionTime":"2025-11-24T08:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.888009 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovnkube-controller/1.log" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.888823 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovnkube-controller/0.log" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.892342 4718 generic.go:334] "Generic (PLEG): container finished" podID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerID="00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da" exitCode=1 Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.892404 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerDied","Data":"00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da"} Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.892470 4718 scope.go:117] "RemoveContainer" containerID="d3ce0336db27eef196bedb7ea84158b773317543845001ff415827224da006ff" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.893347 4718 scope.go:117] "RemoveContainer" containerID="00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da" Nov 24 08:36:00 crc kubenswrapper[4718]: E1124 08:36:00.893524 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.897678 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.897708 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.897719 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.897733 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.897743 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:00Z","lastTransitionTime":"2025-11-24T08:36:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.910041 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.931063 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3ce0336db27eef196bedb7ea84158b773317543845001ff415827224da006ff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:35:59Z\\\",\\\"message\\\":\\\"8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 08:35:59.557861 5994 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1124 08:35:59.557931 5994 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1124 08:35:59.557960 5994 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1124 08:35:59.558009 5994 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1124 08:35:59.558032 5994 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1124 08:35:59.558060 5994 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1124 08:35:59.558087 5994 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1124 08:35:59.558107 5994 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1124 08:35:59.558138 5994 handler.go:208] Removed *v1.Node event handler 2\\\\nI1124 08:35:59.558198 5994 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1124 08:35:59.558216 5994 handler.go:208] Removed *v1.Node event handler 7\\\\nI1124 08:35:59.558258 5994 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1124 08:35:59.558292 5994 factory.go:656] Stopping watch factory\\\\nI1124 08:35:59.558313 5994 ovnkube.go:599] Stopped ovnkube\\\\nI1124 08:35:59.558342 5994 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1124 
08:35:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:00Z\\\",\\\"message\\\":\\\"ation, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z]\\\\nI1124 08:36:00.694833 6152 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager/controller-manager]} name:Service_openshift-controller-manager/controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.149:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {cab7c637-a021-4a4d-a4b9-06d63c44316f}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:00.694911 6152 obj_retry.go:365] Adding new object: *v1.Pod 
openshift-image-registry/node-ca-2zxt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.945922 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\
"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.961468 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountP
ath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all 
endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.977586 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:00 crc kubenswrapper[4718]: I1124 08:36:00.990271 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.000654 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.000722 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.000741 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.000768 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.000786 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:01Z","lastTransitionTime":"2025-11-24T08:36:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.003746 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.016336 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.035122 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.056018 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.
d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.075672 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volume
Mounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.092842 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.103216 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.103287 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.103301 4718 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.103326 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.103344 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:01Z","lastTransitionTime":"2025-11-24T08:36:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.114517 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\
\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\
\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.136531 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.151359 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.206039 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.206088 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:01 crc 
kubenswrapper[4718]: I1124 08:36:01.206098 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.206116 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.206129 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:01Z","lastTransitionTime":"2025-11-24T08:36:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.308662 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.308697 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.308712 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.308730 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.308741 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:01Z","lastTransitionTime":"2025-11-24T08:36:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.411317 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.411357 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.411368 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.411380 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.411391 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:01Z","lastTransitionTime":"2025-11-24T08:36:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.465245 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj"] Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.465712 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.467797 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.468236 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.479508 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.490261 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.499358 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.511227 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.514088 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.514122 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.514133 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.514148 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.514159 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:01Z","lastTransitionTime":"2025-11-24T08:36:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.530073 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c
19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.540563 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.553316 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.562960 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.572662 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.585019 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.596534 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.602519 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d59d24a4-3e05-4bad-a5ec-4e2689007e54-env-overrides\") pod \"ovnkube-control-plane-749d76644c-l6tzj\" (UID: \"d59d24a4-3e05-4bad-a5ec-4e2689007e54\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.602571 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d59d24a4-3e05-4bad-a5ec-4e2689007e54-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-l6tzj\" (UID: \"d59d24a4-3e05-4bad-a5ec-4e2689007e54\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.602597 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qln9x\" (UniqueName: \"kubernetes.io/projected/d59d24a4-3e05-4bad-a5ec-4e2689007e54-kube-api-access-qln9x\") pod \"ovnkube-control-plane-749d76644c-l6tzj\" (UID: \"d59d24a4-3e05-4bad-a5ec-4e2689007e54\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.602633 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d59d24a4-3e05-4bad-a5ec-4e2689007e54-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-l6tzj\" (UID: \"d59d24a4-3e05-4bad-a5ec-4e2689007e54\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.605685 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.616408 4718 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.616445 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.616454 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.616467 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.616476 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:01Z","lastTransitionTime":"2025-11-24T08:36:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.621250 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981c
d8b7061808cd43123e9111da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3ce0336db27eef196bedb7ea84158b773317543845001ff415827224da006ff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:35:59Z\\\",\\\"message\\\":\\\"8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 08:35:59.557861 5994 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1124 08:35:59.557931 5994 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1124 08:35:59.557960 5994 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1124 08:35:59.558009 5994 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1124 08:35:59.558032 5994 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1124 08:35:59.558060 5994 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1124 08:35:59.558087 5994 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1124 08:35:59.558107 5994 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1124 08:35:59.558138 5994 handler.go:208] Removed *v1.Node event handler 2\\\\nI1124 08:35:59.558198 5994 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1124 08:35:59.558216 5994 handler.go:208] Removed *v1.Node event handler 7\\\\nI1124 08:35:59.558258 5994 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1124 08:35:59.558292 5994 factory.go:656] Stopping watch factory\\\\nI1124 08:35:59.558313 5994 ovnkube.go:599] Stopped ovnkube\\\\nI1124 08:35:59.558342 5994 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1124 08:35:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:00Z\\\",\\\"message\\\":\\\"ation, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z]\\\\nI1124 08:36:00.694833 6152 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager/controller-manager]} name:Service_openshift-controller-manager/controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.149:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{cab7c637-a021-4a4d-a4b9-06d63c44316f}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:00.694911 6152 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-2zxt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.631856 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.643120 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.654636 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.703401 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qln9x\" (UniqueName: \"kubernetes.io/projected/d59d24a4-3e05-4bad-a5ec-4e2689007e54-kube-api-access-qln9x\") pod \"ovnkube-control-plane-749d76644c-l6tzj\" (UID: \"d59d24a4-3e05-4bad-a5ec-4e2689007e54\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.703457 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d59d24a4-3e05-4bad-a5ec-4e2689007e54-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-l6tzj\" (UID: \"d59d24a4-3e05-4bad-a5ec-4e2689007e54\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.703519 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d59d24a4-3e05-4bad-a5ec-4e2689007e54-env-overrides\") pod \"ovnkube-control-plane-749d76644c-l6tzj\" (UID: \"d59d24a4-3e05-4bad-a5ec-4e2689007e54\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.703557 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d59d24a4-3e05-4bad-a5ec-4e2689007e54-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-l6tzj\" (UID: \"d59d24a4-3e05-4bad-a5ec-4e2689007e54\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.704241 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d59d24a4-3e05-4bad-a5ec-4e2689007e54-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-l6tzj\" (UID: \"d59d24a4-3e05-4bad-a5ec-4e2689007e54\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.704363 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d59d24a4-3e05-4bad-a5ec-4e2689007e54-env-overrides\") pod \"ovnkube-control-plane-749d76644c-l6tzj\" (UID: 
\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.709602 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d59d24a4-3e05-4bad-a5ec-4e2689007e54-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-l6tzj\" (UID: \"d59d24a4-3e05-4bad-a5ec-4e2689007e54\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.719539 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.719588 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.719601 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.719618 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.719637 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:01Z","lastTransitionTime":"2025-11-24T08:36:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.725351 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qln9x\" (UniqueName: \"kubernetes.io/projected/d59d24a4-3e05-4bad-a5ec-4e2689007e54-kube-api-access-qln9x\") pod \"ovnkube-control-plane-749d76644c-l6tzj\" (UID: \"d59d24a4-3e05-4bad-a5ec-4e2689007e54\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.778564 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" Nov 24 08:36:01 crc kubenswrapper[4718]: W1124 08:36:01.791416 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd59d24a4_3e05_4bad_a5ec_4e2689007e54.slice/crio-6dc6e0667e09647bb79896d918875df46150e9bd8a481808483ee035c0f3c2dd WatchSource:0}: Error finding container 6dc6e0667e09647bb79896d918875df46150e9bd8a481808483ee035c0f3c2dd: Status 404 returned error can't find the container with id 6dc6e0667e09647bb79896d918875df46150e9bd8a481808483ee035c0f3c2dd Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.821453 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.821491 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.821502 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.821518 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.821529 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:01Z","lastTransitionTime":"2025-11-24T08:36:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.895424 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovnkube-controller/1.log" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.898529 4718 scope.go:117] "RemoveContainer" containerID="00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da" Nov 24 08:36:01 crc kubenswrapper[4718]: E1124 08:36:01.898661 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.898796 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" event={"ID":"d59d24a4-3e05-4bad-a5ec-4e2689007e54","Type":"ContainerStarted","Data":"6dc6e0667e09647bb79896d918875df46150e9bd8a481808483ee035c0f3c2dd"} Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.914003 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.924101 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.924137 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.924147 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.924163 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.924174 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:01Z","lastTransitionTime":"2025-11-24T08:36:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.926679 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.937282 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.948837 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.958932 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.968864 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.977713 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:01 crc kubenswrapper[4718]: I1124 08:36:01.993533 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:01Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.004580 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.018121 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\"
:\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3ae
aaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.026757 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.026792 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.026801 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.026815 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.026828 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:02Z","lastTransitionTime":"2025-11-24T08:36:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.034458 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981c
d8b7061808cd43123e9111da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:00Z\\\",\\\"message\\\":\\\"ation, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z]\\\\nI1124 08:36:00.694833 6152 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager/controller-manager]} name:Service_openshift-controller-manager/controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.149:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {cab7c637-a021-4a4d-a4b9-06d63c44316f}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:00.694911 6152 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-2zxt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.043532 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.052372 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.064603 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.078176 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.088180 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.129809 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.129841 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.129852 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.129868 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.129880 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:02Z","lastTransitionTime":"2025-11-24T08:36:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.192615 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-ctdmz"] Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.193030 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:02 crc kubenswrapper[4718]: E1124 08:36:02.193088 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.207412 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.219769 4718 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.231210 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.231696 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.231729 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.231740 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.231756 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.231767 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:02Z","lastTransitionTime":"2025-11-24T08:36:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.246147 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.263745 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c687744
1ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.277408 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.289413 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.300344 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.308587 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs\") pod \"network-metrics-daemon-ctdmz\" (UID: \"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\") " pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.308644 4718 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hv88j\" (UniqueName: \"kubernetes.io/projected/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-kube-api-access-hv88j\") pod \"network-metrics-daemon-ctdmz\" (UID: \"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\") " pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.309987 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.321152 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.333420 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:02 crc 
kubenswrapper[4718]: I1124 08:36:02.333457 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.333465 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.333479 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.333488 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:02Z","lastTransitionTime":"2025-11-24T08:36:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.336046 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.348033 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.358931 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.375769 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981c
d8b7061808cd43123e9111da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:00Z\\\",\\\"message\\\":\\\"ation, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z]\\\\nI1124 08:36:00.694833 6152 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager/controller-manager]} name:Service_openshift-controller-manager/controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.149:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {cab7c637-a021-4a4d-a4b9-06d63c44316f}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:00.694911 6152 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-2zxt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.389560 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.405708 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.409358 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs\") pod \"network-metrics-daemon-ctdmz\" (UID: \"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\") " pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.409420 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hv88j\" (UniqueName: \"kubernetes.io/projected/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-kube-api-access-hv88j\") pod \"network-metrics-daemon-ctdmz\" (UID: \"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\") " pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:02 crc kubenswrapper[4718]: E1124 08:36:02.409516 4718 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 08:36:02 crc kubenswrapper[4718]: E1124 08:36:02.409599 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs podName:3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97 nodeName:}" failed. No retries permitted until 2025-11-24 08:36:02.909578374 +0000 UTC m=+35.025869348 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs") pod "network-metrics-daemon-ctdmz" (UID: "3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.421420 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.425879 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hv88j\" (UniqueName: \"kubernetes.io/projected/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-kube-api-access-hv88j\") pod \"network-metrics-daemon-ctdmz\" (UID: \"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\") " pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.436034 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.436063 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.436072 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.436085 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.436093 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:02Z","lastTransitionTime":"2025-11-24T08:36:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.538517 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.538551 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.538561 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.538575 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.538585 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:02Z","lastTransitionTime":"2025-11-24T08:36:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.596508 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:02 crc kubenswrapper[4718]: E1124 08:36:02.596586 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.596657 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:02 crc kubenswrapper[4718]: E1124 08:36:02.596726 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.596740 4718 scope.go:117] "RemoveContainer" containerID="5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.596754 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:02 crc kubenswrapper[4718]: E1124 08:36:02.596828 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.640524 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.640562 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.640569 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.640583 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.640593 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:02Z","lastTransitionTime":"2025-11-24T08:36:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.742010 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.742048 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.742067 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.742083 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.742093 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:02Z","lastTransitionTime":"2025-11-24T08:36:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.844560 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.844591 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.844600 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.844614 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.844622 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:02Z","lastTransitionTime":"2025-11-24T08:36:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.903963 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" event={"ID":"d59d24a4-3e05-4bad-a5ec-4e2689007e54","Type":"ContainerStarted","Data":"430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2"} Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.904039 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" event={"ID":"d59d24a4-3e05-4bad-a5ec-4e2689007e54","Type":"ContainerStarted","Data":"d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb"} Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.906310 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.908105 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b"} Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.908410 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.913700 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.916085 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs\") pod \"network-metrics-daemon-ctdmz\" (UID: \"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\") " pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:02 crc kubenswrapper[4718]: E1124 08:36:02.916251 4718 secret.go:188] Couldn't get secret 
openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 08:36:02 crc kubenswrapper[4718]: E1124 08:36:02.916330 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs podName:3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97 nodeName:}" failed. No retries permitted until 2025-11-24 08:36:03.916317112 +0000 UTC m=+36.032608016 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs") pod "network-metrics-daemon-ctdmz" (UID: "3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.931608 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-cer
ts\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.943754 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.946480 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.946501 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.946509 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.946522 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.946532 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:02Z","lastTransitionTime":"2025-11-24T08:36:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.956623 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.966504 4718 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 
08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.983688 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe0
97c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:00Z\\\",\\\"message\\\":\\\"ation, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z]\\\\nI1124 08:36:00.694833 6152 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager/controller-manager]} name:Service_openshift-controller-manager/controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.149:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {cab7c637-a021-4a4d-a4b9-06d63c44316f}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:00.694911 6152 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-2zxt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:02 crc kubenswrapper[4718]: I1124 08:36:02.992204 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:02Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.001955 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.011903 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.021690 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.032497 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.043747 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.048205 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.048241 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.048249 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.048264 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.048273 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:03Z","lastTransitionTime":"2025-11-24T08:36:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.054584 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.063031 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.074046 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.089255 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 
2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.099909 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.110705 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.136209 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.151075 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.151112 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.151122 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.151137 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.151147 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:03Z","lastTransitionTime":"2025-11-24T08:36:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.168023 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.180437 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.191143 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.202958 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.215575 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.233088 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.245052 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.253130 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.253168 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.253179 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.253194 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.253206 4718 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:03Z","lastTransitionTime":"2025-11-24T08:36:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.255362 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.264741 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.281330 4718 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:00Z\\\",\\\"message\\\":\\\"ation, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z]\\\\nI1124 08:36:00.694833 6152 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager/controller-manager]} name:Service_openshift-controller-manager/controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.149:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {cab7c637-a021-4a4d-a4b9-06d63c44316f}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:00.694911 6152 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-2zxt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.292041 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.302940 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.313728 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.323717 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.334219 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:03Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.355607 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.355638 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.355649 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.355664 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.355674 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:03Z","lastTransitionTime":"2025-11-24T08:36:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.457299 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.457338 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.457349 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.457367 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.457379 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:03Z","lastTransitionTime":"2025-11-24T08:36:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.559640 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.559682 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.559762 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.559776 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.559784 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:03Z","lastTransitionTime":"2025-11-24T08:36:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.596428 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:03 crc kubenswrapper[4718]: E1124 08:36:03.596619 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.662071 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.662174 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.662195 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.662221 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.662237 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:03Z","lastTransitionTime":"2025-11-24T08:36:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.764987 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.765019 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.765027 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.765044 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.765055 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:03Z","lastTransitionTime":"2025-11-24T08:36:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.869174 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.869233 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.869245 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.869264 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.869296 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:03Z","lastTransitionTime":"2025-11-24T08:36:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.925852 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs\") pod \"network-metrics-daemon-ctdmz\" (UID: \"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\") " pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:03 crc kubenswrapper[4718]: E1124 08:36:03.926148 4718 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 08:36:03 crc kubenswrapper[4718]: E1124 08:36:03.926303 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs podName:3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97 nodeName:}" failed. No retries permitted until 2025-11-24 08:36:05.926260595 +0000 UTC m=+38.042551539 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs") pod "network-metrics-daemon-ctdmz" (UID: "3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.971184 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.971219 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.971229 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.971242 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:03 crc kubenswrapper[4718]: I1124 08:36:03.971253 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:03Z","lastTransitionTime":"2025-11-24T08:36:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.073727 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.073772 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.073783 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.073816 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.073829 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:04Z","lastTransitionTime":"2025-11-24T08:36:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.153767 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.153807 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.153824 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.153842 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.153853 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:04Z","lastTransitionTime":"2025-11-24T08:36:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.165443 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:04Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.168424 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.168451 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.168460 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.168473 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.168482 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:04Z","lastTransitionTime":"2025-11-24T08:36:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.179463 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:04Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.182318 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.182350 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.182371 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.182390 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.182399 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:04Z","lastTransitionTime":"2025-11-24T08:36:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.196629 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:04Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.199792 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.199820 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.199828 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.199840 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.199849 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:04Z","lastTransitionTime":"2025-11-24T08:36:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.211628 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:04Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.215002 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.215025 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.215033 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.215047 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.215057 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:04Z","lastTransitionTime":"2025-11-24T08:36:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.224992 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:04Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.225106 4718 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.226499 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.226531 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.226544 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.226558 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.226568 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:04Z","lastTransitionTime":"2025-11-24T08:36:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.329289 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.329367 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.329378 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.329394 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.329406 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:04Z","lastTransitionTime":"2025-11-24T08:36:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.329458 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.329613 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.329682 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:36:20.329665188 +0000 UTC m=+52.445956092 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.329675 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.329721 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.329771 4718 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.329801 4718 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.329842 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 08:36:20.329820852 +0000 UTC m=+52.446111786 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.329875 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 08:36:20.329858563 +0000 UTC m=+52.446149557 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.329778 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.329849 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.329919 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.329940 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.329951 4718 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.329951 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.330023 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 08:36:20.330004127 +0000 UTC m=+52.446295091 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.330028 4718 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.330105 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 08:36:20.330084609 +0000 UTC m=+52.446375613 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.432041 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.432081 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.432089 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.432103 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.432112 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:04Z","lastTransitionTime":"2025-11-24T08:36:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.534005 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.534076 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.534096 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.534114 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.534125 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:04Z","lastTransitionTime":"2025-11-24T08:36:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.596398 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.596459 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.596494 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.596539 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.596638 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:04 crc kubenswrapper[4718]: E1124 08:36:04.596730 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.635586 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.635643 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.635661 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.635683 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.635699 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:04Z","lastTransitionTime":"2025-11-24T08:36:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.738910 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.738963 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.739001 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.739018 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.739031 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:04Z","lastTransitionTime":"2025-11-24T08:36:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.841420 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.841474 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.841483 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.841497 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.841534 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:04Z","lastTransitionTime":"2025-11-24T08:36:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.943823 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.943867 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.943876 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.943891 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:04 crc kubenswrapper[4718]: I1124 08:36:04.943901 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:04Z","lastTransitionTime":"2025-11-24T08:36:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.046437 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.046835 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.046852 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.046868 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.046893 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:05Z","lastTransitionTime":"2025-11-24T08:36:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.148939 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.149002 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.149015 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.149034 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.149046 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:05Z","lastTransitionTime":"2025-11-24T08:36:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.252086 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.252146 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.252157 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.252185 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.252201 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:05Z","lastTransitionTime":"2025-11-24T08:36:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.353942 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.353991 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.354000 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.354014 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.354023 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:05Z","lastTransitionTime":"2025-11-24T08:36:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.456797 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.456842 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.456852 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.456866 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.456877 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:05Z","lastTransitionTime":"2025-11-24T08:36:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.559652 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.559693 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.559704 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.559719 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.559727 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:05Z","lastTransitionTime":"2025-11-24T08:36:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.596129 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:05 crc kubenswrapper[4718]: E1124 08:36:05.596322 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.661770 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.661808 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.661817 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.661832 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.661841 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:05Z","lastTransitionTime":"2025-11-24T08:36:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.764398 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.764446 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.764457 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.764475 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.764487 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:05Z","lastTransitionTime":"2025-11-24T08:36:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.866939 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.867032 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.867049 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.867074 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.867091 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:05Z","lastTransitionTime":"2025-11-24T08:36:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.946026 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs\") pod \"network-metrics-daemon-ctdmz\" (UID: \"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\") " pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:05 crc kubenswrapper[4718]: E1124 08:36:05.946177 4718 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 08:36:05 crc kubenswrapper[4718]: E1124 08:36:05.946262 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs podName:3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97 nodeName:}" failed. No retries permitted until 2025-11-24 08:36:09.946245295 +0000 UTC m=+42.062536199 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs") pod "network-metrics-daemon-ctdmz" (UID: "3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.970105 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.970149 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.970159 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.970174 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:05 crc kubenswrapper[4718]: I1124 08:36:05.970184 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:05Z","lastTransitionTime":"2025-11-24T08:36:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.072455 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.072493 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.072504 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.072516 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.072525 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:06Z","lastTransitionTime":"2025-11-24T08:36:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.179285 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.179334 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.179342 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.179354 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.179362 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:06Z","lastTransitionTime":"2025-11-24T08:36:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.281854 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.281902 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.281911 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.281928 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.281944 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:06Z","lastTransitionTime":"2025-11-24T08:36:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.384274 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.384321 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.384332 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.384347 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.384357 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:06Z","lastTransitionTime":"2025-11-24T08:36:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.487173 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.487210 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.487219 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.487234 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.487243 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:06Z","lastTransitionTime":"2025-11-24T08:36:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.588988 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.589047 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.589055 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.589069 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.589078 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:06Z","lastTransitionTime":"2025-11-24T08:36:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.596244 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.596244 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.596353 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:06 crc kubenswrapper[4718]: E1124 08:36:06.596671 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:06 crc kubenswrapper[4718]: E1124 08:36:06.596780 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:06 crc kubenswrapper[4718]: E1124 08:36:06.596956 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.690810 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.690883 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.690906 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.690934 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.690954 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:06Z","lastTransitionTime":"2025-11-24T08:36:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.793660 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.793706 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.793733 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.793758 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.793773 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:06Z","lastTransitionTime":"2025-11-24T08:36:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.895999 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.896043 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.896054 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.896071 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.896084 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:06Z","lastTransitionTime":"2025-11-24T08:36:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.998828 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.998881 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.998897 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.998918 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:06 crc kubenswrapper[4718]: I1124 08:36:06.998934 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:06Z","lastTransitionTime":"2025-11-24T08:36:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.101216 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.101254 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.101265 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.101279 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.101288 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:07Z","lastTransitionTime":"2025-11-24T08:36:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.203699 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.203735 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.203744 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.203765 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.203780 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:07Z","lastTransitionTime":"2025-11-24T08:36:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.305576 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.305619 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.305630 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.305647 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.305660 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:07Z","lastTransitionTime":"2025-11-24T08:36:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.408161 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.408192 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.408200 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.408213 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.408221 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:07Z","lastTransitionTime":"2025-11-24T08:36:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.510021 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.510059 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.510069 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.510082 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.510092 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:07Z","lastTransitionTime":"2025-11-24T08:36:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.596197 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:07 crc kubenswrapper[4718]: E1124 08:36:07.596361 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.612152 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.612194 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.612206 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.612223 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.612241 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:07Z","lastTransitionTime":"2025-11-24T08:36:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.631493 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.632326 4718 scope.go:117] "RemoveContainer" containerID="00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da" Nov 24 08:36:07 crc kubenswrapper[4718]: E1124 08:36:07.632509 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.714354 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.714403 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.714414 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.714431 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.714447 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:07Z","lastTransitionTime":"2025-11-24T08:36:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.816894 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.816938 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.816951 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.816996 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.817010 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:07Z","lastTransitionTime":"2025-11-24T08:36:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.919438 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.919489 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.919497 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.919512 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:07 crc kubenswrapper[4718]: I1124 08:36:07.919522 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:07Z","lastTransitionTime":"2025-11-24T08:36:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.021621 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.021676 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.021686 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.021701 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.021712 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:08Z","lastTransitionTime":"2025-11-24T08:36:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.123391 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.123434 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.123444 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.123461 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.123473 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:08Z","lastTransitionTime":"2025-11-24T08:36:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.225293 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.225328 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.225336 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.225348 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.225357 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:08Z","lastTransitionTime":"2025-11-24T08:36:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.327864 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.327923 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.327934 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.327949 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.327958 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:08Z","lastTransitionTime":"2025-11-24T08:36:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.430388 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.430438 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.430449 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.430466 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.430476 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:08Z","lastTransitionTime":"2025-11-24T08:36:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.533260 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.533302 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.533313 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.533328 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.533340 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:08Z","lastTransitionTime":"2025-11-24T08:36:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.595509 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.595569 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:08 crc kubenswrapper[4718]: E1124 08:36:08.596119 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.596187 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:08 crc kubenswrapper[4718]: E1124 08:36:08.596251 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:08 crc kubenswrapper[4718]: E1124 08:36:08.596431 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.607406 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.625930 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910
b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:00Z\\\",\\\"message\\\":\\\"ation, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z]\\\\nI1124 08:36:00.694833 6152 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager/controller-manager]} name:Service_openshift-controller-manager/controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.149:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {cab7c637-a021-4a4d-a4b9-06d63c44316f}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:00.694911 6152 obj_retry.go:365] Adding new object: *v1.Pod 
openshift-image-registry/node-ca-2zxt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\
\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.635715 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.635752 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.635762 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.635780 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.635791 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:08Z","lastTransitionTime":"2025-11-24T08:36:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.638207 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.649452 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:08Z is after 2025-08-24T17:21:41Z" Nov 24 
08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.662406 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.674163 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.688881 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.700782 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.711804 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.720434 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.731621 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.738350 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.738396 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.738405 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.738420 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.738429 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:08Z","lastTransitionTime":"2025-11-24T08:36:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.743373 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.754182 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.766352 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.791559 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.807322 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.821920 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\"
:\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3ae
aaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.840285 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.840310 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.840319 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.840332 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.840341 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:08Z","lastTransitionTime":"2025-11-24T08:36:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.943195 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.943245 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.943257 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.943278 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:08 crc kubenswrapper[4718]: I1124 08:36:08.943291 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:08Z","lastTransitionTime":"2025-11-24T08:36:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.045468 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.045538 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.045553 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.045572 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.045585 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:09Z","lastTransitionTime":"2025-11-24T08:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.148070 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.148112 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.148123 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.148137 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.148147 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:09Z","lastTransitionTime":"2025-11-24T08:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.250365 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.250403 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.250412 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.250428 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.250439 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:09Z","lastTransitionTime":"2025-11-24T08:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.354423 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.354487 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.354500 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.354526 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.354539 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:09Z","lastTransitionTime":"2025-11-24T08:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.457052 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.457081 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.457089 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.457105 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.457115 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:09Z","lastTransitionTime":"2025-11-24T08:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.559824 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.559904 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.559917 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.559993 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.560012 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:09Z","lastTransitionTime":"2025-11-24T08:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.595823 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:09 crc kubenswrapper[4718]: E1124 08:36:09.596059 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.662543 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.662579 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.662588 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.662602 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.662614 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:09Z","lastTransitionTime":"2025-11-24T08:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.765588 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.765669 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.765684 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.765702 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.765711 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:09Z","lastTransitionTime":"2025-11-24T08:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.868667 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.868706 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.868714 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.868727 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.868736 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:09Z","lastTransitionTime":"2025-11-24T08:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.971117 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.971163 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.971174 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.971189 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.971199 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:09Z","lastTransitionTime":"2025-11-24T08:36:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:09 crc kubenswrapper[4718]: I1124 08:36:09.983770 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs\") pod \"network-metrics-daemon-ctdmz\" (UID: \"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\") " pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:09 crc kubenswrapper[4718]: E1124 08:36:09.983882 4718 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 08:36:09 crc kubenswrapper[4718]: E1124 08:36:09.983924 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs podName:3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97 nodeName:}" failed. No retries permitted until 2025-11-24 08:36:17.983911605 +0000 UTC m=+50.100202509 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs") pod "network-metrics-daemon-ctdmz" (UID: "3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.073388 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.073417 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.073425 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.073437 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.073448 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:10Z","lastTransitionTime":"2025-11-24T08:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.176440 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.176500 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.176514 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.176532 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.176542 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:10Z","lastTransitionTime":"2025-11-24T08:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.279576 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.279629 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.279640 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.279660 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.279675 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:10Z","lastTransitionTime":"2025-11-24T08:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.384878 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.384925 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.384936 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.384965 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.385010 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:10Z","lastTransitionTime":"2025-11-24T08:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.488584 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.488665 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.488688 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.488721 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.488742 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:10Z","lastTransitionTime":"2025-11-24T08:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.591696 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.591780 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.591797 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.591817 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.591830 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:10Z","lastTransitionTime":"2025-11-24T08:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.596338 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.596357 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.596339 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:10 crc kubenswrapper[4718]: E1124 08:36:10.596510 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:10 crc kubenswrapper[4718]: E1124 08:36:10.596657 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:10 crc kubenswrapper[4718]: E1124 08:36:10.596772 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.695122 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.695184 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.695200 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.695219 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.695231 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:10Z","lastTransitionTime":"2025-11-24T08:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.798232 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.798294 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.798303 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.798318 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.798330 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:10Z","lastTransitionTime":"2025-11-24T08:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.902354 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.902483 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.902499 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.902522 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:10 crc kubenswrapper[4718]: I1124 08:36:10.902536 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:10Z","lastTransitionTime":"2025-11-24T08:36:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.005261 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.005310 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.005348 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.005367 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.005379 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:11Z","lastTransitionTime":"2025-11-24T08:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.108507 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.108553 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.108564 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.108579 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.108591 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:11Z","lastTransitionTime":"2025-11-24T08:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.211456 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.211492 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.211501 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.211516 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.211525 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:11Z","lastTransitionTime":"2025-11-24T08:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.313695 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.313769 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.313792 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.313821 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.313839 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:11Z","lastTransitionTime":"2025-11-24T08:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.416018 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.416068 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.416083 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.416099 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.416110 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:11Z","lastTransitionTime":"2025-11-24T08:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.518186 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.518223 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.518234 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.518251 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.518261 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:11Z","lastTransitionTime":"2025-11-24T08:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.596387 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:11 crc kubenswrapper[4718]: E1124 08:36:11.596597 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.620678 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.620723 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.620736 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.620753 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.620766 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:11Z","lastTransitionTime":"2025-11-24T08:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.723533 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.723603 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.723625 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.723654 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.723675 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:11Z","lastTransitionTime":"2025-11-24T08:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.826250 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.826305 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.826319 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.826337 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.826349 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:11Z","lastTransitionTime":"2025-11-24T08:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.928910 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.928964 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.929000 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.929021 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:11 crc kubenswrapper[4718]: I1124 08:36:11.929033 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:11Z","lastTransitionTime":"2025-11-24T08:36:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.032062 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.032147 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.032188 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.032222 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.032247 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:12Z","lastTransitionTime":"2025-11-24T08:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.134193 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.134253 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.134265 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.134280 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.134289 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:12Z","lastTransitionTime":"2025-11-24T08:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.237239 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.237282 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.237310 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.237329 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.237340 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:12Z","lastTransitionTime":"2025-11-24T08:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.340188 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.340251 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.340270 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.340301 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.340321 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:12Z","lastTransitionTime":"2025-11-24T08:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.443127 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.443171 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.443180 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.443197 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.443208 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:12Z","lastTransitionTime":"2025-11-24T08:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.545451 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.545501 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.545512 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.545529 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.545543 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:12Z","lastTransitionTime":"2025-11-24T08:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.596148 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.596207 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:12 crc kubenswrapper[4718]: E1124 08:36:12.596326 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.596339 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:12 crc kubenswrapper[4718]: E1124 08:36:12.596456 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:12 crc kubenswrapper[4718]: E1124 08:36:12.596606 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.648857 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.648914 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.648930 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.648950 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.648965 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:12Z","lastTransitionTime":"2025-11-24T08:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.752236 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.752277 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.752285 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.752302 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.752311 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:12Z","lastTransitionTime":"2025-11-24T08:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.854160 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.854198 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.854207 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.854221 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.854231 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:12Z","lastTransitionTime":"2025-11-24T08:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.956398 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.956443 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.956454 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.956470 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:12 crc kubenswrapper[4718]: I1124 08:36:12.956481 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:12Z","lastTransitionTime":"2025-11-24T08:36:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.058585 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.058627 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.058636 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.058653 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.058663 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:13Z","lastTransitionTime":"2025-11-24T08:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.160635 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.160679 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.160691 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.160707 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.160716 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:13Z","lastTransitionTime":"2025-11-24T08:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.263206 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.263246 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.263259 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.263276 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.263288 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:13Z","lastTransitionTime":"2025-11-24T08:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.365324 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.365364 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.365372 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.365385 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.365395 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:13Z","lastTransitionTime":"2025-11-24T08:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.467352 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.467403 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.467420 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.467443 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.467459 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:13Z","lastTransitionTime":"2025-11-24T08:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.569704 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.569732 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.569739 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.569751 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.569760 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:13Z","lastTransitionTime":"2025-11-24T08:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.595414 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:13 crc kubenswrapper[4718]: E1124 08:36:13.595513 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.671742 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.671788 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.671799 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.671818 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.671830 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:13Z","lastTransitionTime":"2025-11-24T08:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.774696 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.774734 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.774742 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.774756 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.774764 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:13Z","lastTransitionTime":"2025-11-24T08:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.876304 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.876352 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.876364 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.876380 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.876391 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:13Z","lastTransitionTime":"2025-11-24T08:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.978886 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.978929 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.978942 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.978959 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:13 crc kubenswrapper[4718]: I1124 08:36:13.978991 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:13Z","lastTransitionTime":"2025-11-24T08:36:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.081641 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.081685 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.081693 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.081709 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.081718 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:14Z","lastTransitionTime":"2025-11-24T08:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.184050 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.184123 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.184139 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.184162 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.184178 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:14Z","lastTransitionTime":"2025-11-24T08:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.286470 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.286510 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.286523 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.286541 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.286551 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:14Z","lastTransitionTime":"2025-11-24T08:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.388647 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.388692 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.388703 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.388720 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.388731 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:14Z","lastTransitionTime":"2025-11-24T08:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.490633 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.490671 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.490680 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.490695 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.490704 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:14Z","lastTransitionTime":"2025-11-24T08:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.502912 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.503037 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.503068 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.503152 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.503182 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:14Z","lastTransitionTime":"2025-11-24T08:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:14 crc kubenswrapper[4718]: E1124 08:36:14.517104 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:14Z is after 
2025-08-24T17:21:41Z" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.521477 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.521510 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.521519 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.521536 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.521548 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:14Z","lastTransitionTime":"2025-11-24T08:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:14 crc kubenswrapper[4718]: E1124 08:36:14.532459 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:14Z is after 
2025-08-24T17:21:41Z" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.535925 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.535963 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.535996 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.536017 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.536029 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:14Z","lastTransitionTime":"2025-11-24T08:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:14 crc kubenswrapper[4718]: E1124 08:36:14.548216 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:14Z is after 
2025-08-24T17:21:41Z" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.554272 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.554304 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.554313 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.554326 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.554336 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:14Z","lastTransitionTime":"2025-11-24T08:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:14 crc kubenswrapper[4718]: E1124 08:36:14.565632 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:14Z is after 
2025-08-24T17:21:41Z" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.571058 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.571107 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.571117 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.571136 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.571148 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:14Z","lastTransitionTime":"2025-11-24T08:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:14 crc kubenswrapper[4718]: E1124 08:36:14.583469 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:14Z is after 
2025-08-24T17:21:41Z" Nov 24 08:36:14 crc kubenswrapper[4718]: E1124 08:36:14.583643 4718 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.593124 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.593162 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.593175 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.593192 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.593205 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:14Z","lastTransitionTime":"2025-11-24T08:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.595914 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.595923 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.595943 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:14 crc kubenswrapper[4718]: E1124 08:36:14.596174 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:14 crc kubenswrapper[4718]: E1124 08:36:14.596258 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:14 crc kubenswrapper[4718]: E1124 08:36:14.596424 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.695107 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.695139 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.695153 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.695169 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.695180 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:14Z","lastTransitionTime":"2025-11-24T08:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.798150 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.798188 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.798200 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.798216 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.798228 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:14Z","lastTransitionTime":"2025-11-24T08:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.900957 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.901022 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.901032 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.901046 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:14 crc kubenswrapper[4718]: I1124 08:36:14.901057 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:14Z","lastTransitionTime":"2025-11-24T08:36:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.003485 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.003518 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.003528 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.003546 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.003563 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:15Z","lastTransitionTime":"2025-11-24T08:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.105550 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.105589 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.105597 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.105609 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.105619 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:15Z","lastTransitionTime":"2025-11-24T08:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.208065 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.208106 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.208115 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.208129 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.208145 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:15Z","lastTransitionTime":"2025-11-24T08:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.311632 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.311680 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.311716 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.311737 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.311749 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:15Z","lastTransitionTime":"2025-11-24T08:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.413919 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.413953 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.413963 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.413992 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.414002 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:15Z","lastTransitionTime":"2025-11-24T08:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.516627 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.516857 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.516962 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.517092 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.517184 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:15Z","lastTransitionTime":"2025-11-24T08:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.596074 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:15 crc kubenswrapper[4718]: E1124 08:36:15.596448 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.619249 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.619304 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.619314 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.619329 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.619337 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:15Z","lastTransitionTime":"2025-11-24T08:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.722225 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.722519 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.722585 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.722661 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.722735 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:15Z","lastTransitionTime":"2025-11-24T08:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.824630 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.824663 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.824672 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.824687 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.824695 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:15Z","lastTransitionTime":"2025-11-24T08:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.927425 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.927688 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.927750 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.927834 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:15 crc kubenswrapper[4718]: I1124 08:36:15.927894 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:15Z","lastTransitionTime":"2025-11-24T08:36:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.030417 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.030467 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.030477 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.030493 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.030504 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:16Z","lastTransitionTime":"2025-11-24T08:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.133219 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.133259 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.133268 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.133283 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.133292 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:16Z","lastTransitionTime":"2025-11-24T08:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.235510 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.235544 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.235552 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.235566 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.235574 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:16Z","lastTransitionTime":"2025-11-24T08:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.337264 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.337292 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.337299 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.337311 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.337319 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:16Z","lastTransitionTime":"2025-11-24T08:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.439545 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.439582 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.439594 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.439610 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.439622 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:16Z","lastTransitionTime":"2025-11-24T08:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.541615 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.541659 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.541673 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.541690 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.541700 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:16Z","lastTransitionTime":"2025-11-24T08:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.595829 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.595919 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.595829 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:16 crc kubenswrapper[4718]: E1124 08:36:16.595955 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:16 crc kubenswrapper[4718]: E1124 08:36:16.596075 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:16 crc kubenswrapper[4718]: E1124 08:36:16.596181 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.643794 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.643829 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.643840 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.643854 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.643863 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:16Z","lastTransitionTime":"2025-11-24T08:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.745861 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.745901 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.745911 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.745925 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.745935 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:16Z","lastTransitionTime":"2025-11-24T08:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.848904 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.848956 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.848989 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.849009 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.849020 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:16Z","lastTransitionTime":"2025-11-24T08:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.951522 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.951617 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.951634 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.951657 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:16 crc kubenswrapper[4718]: I1124 08:36:16.951669 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:16Z","lastTransitionTime":"2025-11-24T08:36:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.054204 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.054271 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.054299 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.054318 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.054331 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:17Z","lastTransitionTime":"2025-11-24T08:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.157690 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.157747 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.157758 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.157781 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.157792 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:17Z","lastTransitionTime":"2025-11-24T08:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.260552 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.260620 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.260634 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.260648 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.260657 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:17Z","lastTransitionTime":"2025-11-24T08:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.362748 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.362797 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.362806 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.362819 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.362831 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:17Z","lastTransitionTime":"2025-11-24T08:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.429539 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.443319 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.461593 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPat
h\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{
\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.466949 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.466997 4718 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.467007 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.467020 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.467030 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:17Z","lastTransitionTime":"2025-11-24T08:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.491047 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.512660 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.531450 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.548820 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.565129 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.568824 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.568996 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.569087 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.569149 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.569208 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:17Z","lastTransitionTime":"2025-11-24T08:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.578930 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.595573 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:17 crc kubenswrapper[4718]: E1124 08:36:17.595747 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.599346 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981c
d8b7061808cd43123e9111da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:00Z\\\",\\\"message\\\":\\\"ation, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z]\\\\nI1124 08:36:00.694833 6152 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager/controller-manager]} name:Service_openshift-controller-manager/controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.149:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {cab7c637-a021-4a4d-a4b9-06d63c44316f}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:00.694911 6152 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-2zxt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.611652 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.623419 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.636576 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.648652 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.659355 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.670788 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.670851 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.670894 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.670904 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.670916 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.670926 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:17Z","lastTransitionTime":"2025-11-24T08:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.682782 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.693235 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.704734 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.773722 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.773754 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.773763 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.773776 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.773785 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:17Z","lastTransitionTime":"2025-11-24T08:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.875647 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.875738 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.875754 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.875787 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.875813 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:17Z","lastTransitionTime":"2025-11-24T08:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.922765 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.934965 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startT
ime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.946936 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4
f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.956765 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.968153 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.978346 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.978410 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.978429 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.978455 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.978472 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:17Z","lastTransitionTime":"2025-11-24T08:36:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:17 crc kubenswrapper[4718]: I1124 08:36:17.987850 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:00Z\\\",\\\"message\\\":\\\"ation, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z]\\\\nI1124 08:36:00.694833 6152 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager/controller-manager]} name:Service_openshift-controller-manager/controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.149:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {cab7c637-a021-4a4d-a4b9-06d63c44316f}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:00.694911 6152 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-2zxt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=ovnkube-controller pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.001756 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:17Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.016039 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.035094 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.055353 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.070244 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba10360-c090-4095-8af1-5956450d250d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2447b7b95e023d5250d7f7f92372712537ac3698312da71d6c6c70b23bccf1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb96b198cd151b3cd4336ecbb27650bb3104c4c68d815ec5b911ee0aca16da2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35
:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946f83395434468ac3f7b67789108982e4341984166b966312fef9bcb1a9d48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.071265 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs\") pod \"network-metrics-daemon-ctdmz\" (UID: \"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\") " pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:18 crc kubenswrapper[4718]: E1124 08:36:18.071531 4718 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 08:36:18 crc kubenswrapper[4718]: E1124 08:36:18.071634 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs podName:3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97 nodeName:}" failed. No retries permitted until 2025-11-24 08:36:34.071604836 +0000 UTC m=+66.187895900 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs") pod "network-metrics-daemon-ctdmz" (UID: "3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.081674 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.081736 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.081754 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.081780 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.081800 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:18Z","lastTransitionTime":"2025-11-24T08:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.083678 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.095674 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: 
I1124 08:36:18.103943 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.115811 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.134357 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.145460 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.158009 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.168391 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.184163 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.184201 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.184213 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.184229 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.184241 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:18Z","lastTransitionTime":"2025-11-24T08:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.286305 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.286342 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.286353 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.286371 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.286382 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:18Z","lastTransitionTime":"2025-11-24T08:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.389003 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.389067 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.389084 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.389106 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.389118 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:18Z","lastTransitionTime":"2025-11-24T08:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.491740 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.491780 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.491812 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.491833 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.491848 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:18Z","lastTransitionTime":"2025-11-24T08:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.593959 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.594024 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.594036 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.594050 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.594059 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:18Z","lastTransitionTime":"2025-11-24T08:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.596206 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:18 crc kubenswrapper[4718]: E1124 08:36:18.596337 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.596853 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:18 crc kubenswrapper[4718]: E1124 08:36:18.596927 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.597061 4718 scope.go:117] "RemoveContainer" containerID="00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.597070 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:18 crc kubenswrapper[4718]: E1124 08:36:18.597271 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.615797 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba10360-c090-4095-8af1-5956450d250d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2447b7b95e023d5250d7f7f92372712537ac3698312da71d6c6c70b23bccf1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb96b198cd151b3cd4336ecbb27650bb3104c4c68d815ec5b911ee0aca16da2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f3
5a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946f83395434468ac3f7b67789108982e4341984166b966312fef9bcb1a9d48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.629939 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.643916 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.655370 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.670252 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.695806 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.695847 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.695859 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.695875 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.695886 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:18Z","lastTransitionTime":"2025-11-24T08:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.699782 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c
19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.712459 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.725262 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.734027 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.750403 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.763267 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.774411 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.794729 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981c
d8b7061808cd43123e9111da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:00Z\\\",\\\"message\\\":\\\"ation, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z]\\\\nI1124 08:36:00.694833 6152 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager/controller-manager]} name:Service_openshift-controller-manager/controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.149:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {cab7c637-a021-4a4d-a4b9-06d63c44316f}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:00.694911 6152 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-2zxt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.798136 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.798171 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.798180 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.798195 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.798204 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:18Z","lastTransitionTime":"2025-11-24T08:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.804784 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.814909 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 
08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.827148 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.839238 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.856743 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.900924 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.900959 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.900989 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.901007 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.901017 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:18Z","lastTransitionTime":"2025-11-24T08:36:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.956323 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovnkube-controller/1.log" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.958604 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerStarted","Data":"d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85"} Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.959122 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.978480 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e09320c3d2eda09a489164b4647a73781c3c13
9a1421b9de9e38d9d97e3e85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:00Z\\\",\\\"message\\\":\\\"ation, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z]\\\\nI1124 08:36:00.694833 6152 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager/controller-manager]} name:Service_openshift-controller-manager/controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.149:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {cab7c637-a021-4a4d-a4b9-06d63c44316f}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:00.694911 6152 obj_retry.go:365] Adding new object: *v1.Pod 
openshift-image-registry/node-ca-2zxt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"
initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:18 crc kubenswrapper[4718]: I1124 08:36:18.991672 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:18Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.003926 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.003959 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.003983 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.003998 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.004010 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:19Z","lastTransitionTime":"2025-11-24T08:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.005224 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:19Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.024210 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"
lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:19Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.036243 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:19Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.050283 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:19Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.062707 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:19Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.077073 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:19Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.090116 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:19Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.104227 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:19Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.106407 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.106444 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.106453 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.106467 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.106476 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:19Z","lastTransitionTime":"2025-11-24T08:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.121942 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba10360-c090-4095-8af1-5956450d250d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2447b7b95e023d5250d7f7f92372712537ac3698312da71d6c6c70b23bccf1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb96b198cd151b3cd4336ecbb27650bb3104c4c68d815ec5b911ee0aca16da2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946f83395434468ac3f7b67789108982e4341984166b966312fef9bcb1a9d48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:19Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.137426 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919
d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:19Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.149552 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:19Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:19 crc 
kubenswrapper[4718]: I1124 08:36:19.161223 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:19Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.180797 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:19Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.194729 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:19Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.208636 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.208705 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.208724 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.208752 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.208771 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:19Z","lastTransitionTime":"2025-11-24T08:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.212192 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.
126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\
"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:19Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.223358 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:19Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.311370 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.311438 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.311453 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.311470 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.311482 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:19Z","lastTransitionTime":"2025-11-24T08:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.413697 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.413740 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.413752 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.413767 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.413780 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:19Z","lastTransitionTime":"2025-11-24T08:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.516481 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.516516 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.516524 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.516538 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.516548 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:19Z","lastTransitionTime":"2025-11-24T08:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.595793 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:19 crc kubenswrapper[4718]: E1124 08:36:19.596119 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.619311 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.619350 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.619360 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.619374 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.619387 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:19Z","lastTransitionTime":"2025-11-24T08:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.721789 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.721874 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.721890 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.721928 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.721940 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:19Z","lastTransitionTime":"2025-11-24T08:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.824782 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.824827 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.824835 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.824848 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.824857 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:19Z","lastTransitionTime":"2025-11-24T08:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.927769 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.927811 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.927823 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.927838 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.927850 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:19Z","lastTransitionTime":"2025-11-24T08:36:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.964702 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovnkube-controller/2.log" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.965364 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovnkube-controller/1.log" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.968652 4718 generic.go:334] "Generic (PLEG): container finished" podID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerID="d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85" exitCode=1 Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.968702 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerDied","Data":"d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85"} Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.968742 4718 scope.go:117] "RemoveContainer" containerID="00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.969498 4718 scope.go:117] "RemoveContainer" containerID="d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85" Nov 24 08:36:19 crc kubenswrapper[4718]: E1124 08:36:19.969669 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" Nov 24 08:36:19 crc kubenswrapper[4718]: I1124 08:36:19.999070 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:19Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.016778 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.029893 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.029937 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.029947 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.029963 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.029998 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:20Z","lastTransitionTime":"2025-11-24T08:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.036920 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.
126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\
"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.049873 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.066007 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.080288 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.092490 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.111311 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e09320c3d2eda09a489164b4647a73781c3c13
9a1421b9de9e38d9d97e3e85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00cb674c9becc8f44957646846220fa9be2d981cd8b7061808cd43123e9111da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:00Z\\\",\\\"message\\\":\\\"ation, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:00Z is after 2025-08-24T17:21:41Z]\\\\nI1124 08:36:00.694833 6152 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager/controller-manager]} name:Service_openshift-controller-manager/controller-manager_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.149:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {cab7c637-a021-4a4d-a4b9-06d63c44316f}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:00.694911 6152 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-2zxt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:19Z\\\",\\\"message\\\":\\\"l-plane-749d76644c-l6tzj\\\\nI1124 08:36:19.363957 6422 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj in node crc\\\\nI1124 08:36:19.363946 6422 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:19.363994 6422 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj after 0 failed attempt(s)\\\\nI1124 08:36:19.363568 6422 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-qbwmc after 0 failed attempt(s)\\\\nI1124 08:36:19.364004 6422 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj\\\\nI1124 08:36:19.364011 
6422 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-qbwmc\\\\nF1124 08:36:19.363476 6422 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:36:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.122017 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.137569 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.137636 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.137656 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.137683 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.137703 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:20Z","lastTransitionTime":"2025-11-24T08:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.139480 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.159323 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.171586 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.188678 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.205518 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba10360-c090-4095-8af1-5956450d250d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2447b7b95e023d5250d7f7f92372712537ac3698312da71d6c6c70b23bccf1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb96b198cd151b3cd4336ecbb27650bb3104c4c68d815ec5b911ee0aca16da2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35
:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946f83395434468ac3f7b67789108982e4341984166b966312fef9bcb1a9d48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.221469 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.237195 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.240221 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.240262 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.240278 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.240297 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.240311 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:20Z","lastTransitionTime":"2025-11-24T08:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.249745 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.262176 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.342410 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.342446 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.342455 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.342467 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.342477 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:20Z","lastTransitionTime":"2025-11-24T08:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.393505 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.393640 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.393673 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:20 crc kubenswrapper[4718]: E1124 08:36:20.393765 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:36:52.393734348 +0000 UTC m=+84.510025252 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:36:20 crc kubenswrapper[4718]: E1124 08:36:20.393783 4718 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 08:36:20 crc kubenswrapper[4718]: E1124 08:36:20.393851 4718 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 08:36:20 crc kubenswrapper[4718]: E1124 08:36:20.393874 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 08:36:52.393861791 +0000 UTC m=+84.510152815 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 08:36:20 crc kubenswrapper[4718]: E1124 08:36:20.393963 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 08:36:20 crc kubenswrapper[4718]: E1124 08:36:20.394010 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 08:36:52.393966564 +0000 UTC m=+84.510257468 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 08:36:20 crc kubenswrapper[4718]: E1124 08:36:20.394018 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 08:36:20 crc kubenswrapper[4718]: E1124 08:36:20.394036 4718 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:36:20 crc kubenswrapper[4718]: E1124 08:36:20.394070 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-11-24 08:36:52.394060487 +0000 UTC m=+84.510351391 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.393851 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.394156 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:20 crc kubenswrapper[4718]: E1124 08:36:20.394257 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 08:36:20 crc kubenswrapper[4718]: E1124 08:36:20.394271 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 08:36:20 crc kubenswrapper[4718]: E1124 08:36:20.394281 4718 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:36:20 crc kubenswrapper[4718]: E1124 08:36:20.394319 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 08:36:52.394307093 +0000 UTC m=+84.510598097 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.444986 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.445260 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.445364 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.445452 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.445513 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:20Z","lastTransitionTime":"2025-11-24T08:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.548389 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.548630 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.548713 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.548820 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.548893 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:20Z","lastTransitionTime":"2025-11-24T08:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.596246 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.596317 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.596260 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:20 crc kubenswrapper[4718]: E1124 08:36:20.596387 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:20 crc kubenswrapper[4718]: E1124 08:36:20.596458 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:20 crc kubenswrapper[4718]: E1124 08:36:20.596510 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.651743 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.651778 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.651787 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.651799 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.651807 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:20Z","lastTransitionTime":"2025-11-24T08:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.754791 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.755052 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.755143 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.755250 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.755329 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:20Z","lastTransitionTime":"2025-11-24T08:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.857706 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.857753 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.857763 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.857777 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.857786 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:20Z","lastTransitionTime":"2025-11-24T08:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.960118 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.960164 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.960173 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.960185 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.960195 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:20Z","lastTransitionTime":"2025-11-24T08:36:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.973408 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovnkube-controller/2.log" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.976762 4718 scope.go:117] "RemoveContainer" containerID="d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85" Nov 24 08:36:20 crc kubenswrapper[4718]: E1124 08:36:20.976959 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" Nov 24 08:36:20 crc kubenswrapper[4718]: I1124 08:36:20.998272 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
tc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29
Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:20Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.011215 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:21Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.024845 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:21Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.037225 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:21Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.052593 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:21Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.062599 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.062638 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.062652 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.062672 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.062686 4718 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:21Z","lastTransitionTime":"2025-11-24T08:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.066635 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:21Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.083224 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:21Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.101163 4718 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:19Z\\\",\\\"message\\\":\\\"l-plane-749d76644c-l6tzj\\\\nI1124 08:36:19.363957 6422 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj in node crc\\\\nI1124 08:36:19.363946 6422 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:19.363994 6422 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj after 0 failed attempt(s)\\\\nI1124 08:36:19.363568 6422 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-qbwmc after 0 failed attempt(s)\\\\nI1124 08:36:19.364004 6422 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj\\\\nI1124 08:36:19.364011 6422 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-qbwmc\\\\nF1124 08:36:19.363476 6422 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:36:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:21Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.111494 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:21Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.122523 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:21Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.132194 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:21Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.143961 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:21Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.154954 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:21Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.164866 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.164907 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.164918 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.164935 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.164959 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:21Z","lastTransitionTime":"2025-11-24T08:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.165288 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba10360-c090-4095-8af1-5956450d250d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2447b7b95e023d5250d7f7f92372712537ac3698312da71d6c6c70b23bccf1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb96b198cd151b3cd4336ecbb27650bb3104c4c68d815ec5b911ee0aca16da2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946f83395434468ac3f7b67789108982e4341984166b966312fef9bcb1a9d48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:21Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.176485 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919
d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:21Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.188193 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:21Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc 
kubenswrapper[4718]: I1124 08:36:21.201637 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:21Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.218165 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:21Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.268003 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.268051 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.268066 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.268082 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.268094 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:21Z","lastTransitionTime":"2025-11-24T08:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.370399 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.370440 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.370449 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.370462 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.370472 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:21Z","lastTransitionTime":"2025-11-24T08:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.472613 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.472661 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.472672 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.472689 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.472702 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:21Z","lastTransitionTime":"2025-11-24T08:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.575389 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.575438 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.575447 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.575460 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.575469 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:21Z","lastTransitionTime":"2025-11-24T08:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.595640 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:21 crc kubenswrapper[4718]: E1124 08:36:21.595786 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.677537 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.677575 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.677585 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.677597 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.677606 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:21Z","lastTransitionTime":"2025-11-24T08:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.780398 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.780478 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.780498 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.780596 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.780616 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:21Z","lastTransitionTime":"2025-11-24T08:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.883560 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.883610 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.883627 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.883650 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.883687 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:21Z","lastTransitionTime":"2025-11-24T08:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.985865 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.985913 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.985924 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.985937 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:21 crc kubenswrapper[4718]: I1124 08:36:21.985946 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:21Z","lastTransitionTime":"2025-11-24T08:36:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.088783 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.088823 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.088831 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.088844 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.088855 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:22Z","lastTransitionTime":"2025-11-24T08:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.191343 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.191384 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.191392 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.191407 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.191416 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:22Z","lastTransitionTime":"2025-11-24T08:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.294200 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.294236 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.294248 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.294264 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.294273 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:22Z","lastTransitionTime":"2025-11-24T08:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.396700 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.396745 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.396753 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.396769 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.396779 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:22Z","lastTransitionTime":"2025-11-24T08:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.499405 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.499445 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.499455 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.499470 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.499479 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:22Z","lastTransitionTime":"2025-11-24T08:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.596319 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.596382 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.596415 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:22 crc kubenswrapper[4718]: E1124 08:36:22.596465 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:22 crc kubenswrapper[4718]: E1124 08:36:22.596616 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:22 crc kubenswrapper[4718]: E1124 08:36:22.596703 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.601927 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.601982 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.601994 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.602009 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.602021 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:22Z","lastTransitionTime":"2025-11-24T08:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.704709 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.704754 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.704765 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.704780 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.704791 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:22Z","lastTransitionTime":"2025-11-24T08:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.807511 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.807567 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.807580 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.807596 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.807611 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:22Z","lastTransitionTime":"2025-11-24T08:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.910209 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.910258 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.910270 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.910287 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:22 crc kubenswrapper[4718]: I1124 08:36:22.910303 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:22Z","lastTransitionTime":"2025-11-24T08:36:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.012497 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.012564 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.012579 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.012600 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.012616 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:23Z","lastTransitionTime":"2025-11-24T08:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.115516 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.115577 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.115592 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.115608 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.115620 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:23Z","lastTransitionTime":"2025-11-24T08:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.217990 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.218034 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.218045 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.218057 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.218067 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:23Z","lastTransitionTime":"2025-11-24T08:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.320508 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.320558 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.320574 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.320591 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.320602 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:23Z","lastTransitionTime":"2025-11-24T08:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.422998 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.423046 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.423058 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.423074 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.423086 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:23Z","lastTransitionTime":"2025-11-24T08:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.525185 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.525431 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.525446 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.525463 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.525476 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:23Z","lastTransitionTime":"2025-11-24T08:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.595719 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:23 crc kubenswrapper[4718]: E1124 08:36:23.595916 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.627209 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.627248 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.627257 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.627270 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.627279 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:23Z","lastTransitionTime":"2025-11-24T08:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.729733 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.729779 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.729789 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.729804 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.729814 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:23Z","lastTransitionTime":"2025-11-24T08:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.832338 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.832400 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.832410 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.832431 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.832444 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:23Z","lastTransitionTime":"2025-11-24T08:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.935364 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.935410 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.935421 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.935437 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:23 crc kubenswrapper[4718]: I1124 08:36:23.935449 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:23Z","lastTransitionTime":"2025-11-24T08:36:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.037912 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.037948 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.037956 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.037983 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.037992 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:24Z","lastTransitionTime":"2025-11-24T08:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.140673 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.140715 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.140728 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.140742 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.140753 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:24Z","lastTransitionTime":"2025-11-24T08:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.242909 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.242945 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.242953 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.242983 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.242993 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:24Z","lastTransitionTime":"2025-11-24T08:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.345491 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.345550 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.345562 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.345600 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.345614 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:24Z","lastTransitionTime":"2025-11-24T08:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.447819 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.447880 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.447894 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.447911 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.447923 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:24Z","lastTransitionTime":"2025-11-24T08:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.550670 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.550706 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.550715 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.550729 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.550738 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:24Z","lastTransitionTime":"2025-11-24T08:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.596418 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:24 crc kubenswrapper[4718]: E1124 08:36:24.596556 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.596757 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:24 crc kubenswrapper[4718]: E1124 08:36:24.596835 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.596894 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:24 crc kubenswrapper[4718]: E1124 08:36:24.596986 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.654155 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.654238 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.654257 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.654286 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.654309 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:24Z","lastTransitionTime":"2025-11-24T08:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.757755 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.757843 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.757858 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.757879 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.757893 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:24Z","lastTransitionTime":"2025-11-24T08:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.821942 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.822104 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.822126 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.822156 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.822176 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:24Z","lastTransitionTime":"2025-11-24T08:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:24 crc kubenswrapper[4718]: E1124 08:36:24.836447 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:24Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.840691 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.840768 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.840780 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.840798 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.840828 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:24Z","lastTransitionTime":"2025-11-24T08:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:24 crc kubenswrapper[4718]: E1124 08:36:24.853491 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:24Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.858487 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.858561 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.858575 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.858603 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.858618 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:24Z","lastTransitionTime":"2025-11-24T08:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:24 crc kubenswrapper[4718]: E1124 08:36:24.881375 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:24Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.886456 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.886524 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.886561 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.886605 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.886639 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:24Z","lastTransitionTime":"2025-11-24T08:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:24 crc kubenswrapper[4718]: E1124 08:36:24.901884 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:24Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.906854 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.906900 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.906917 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.906936 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.906950 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:24Z","lastTransitionTime":"2025-11-24T08:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:24 crc kubenswrapper[4718]: E1124 08:36:24.922705 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:24Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:24 crc kubenswrapper[4718]: E1124 08:36:24.922860 4718 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.924961 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.925030 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.925044 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.925070 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:24 crc kubenswrapper[4718]: I1124 08:36:24.925086 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:24Z","lastTransitionTime":"2025-11-24T08:36:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.027859 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.027893 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.027905 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.027918 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.027929 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:25Z","lastTransitionTime":"2025-11-24T08:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.130631 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.130678 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.130687 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.130703 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.130715 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:25Z","lastTransitionTime":"2025-11-24T08:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.233834 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.233890 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.233906 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.233929 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.233950 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:25Z","lastTransitionTime":"2025-11-24T08:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.336539 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.336587 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.336596 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.336611 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.336620 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:25Z","lastTransitionTime":"2025-11-24T08:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.439259 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.439289 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.439300 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.439317 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.439327 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:25Z","lastTransitionTime":"2025-11-24T08:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.542706 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.542808 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.542874 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.542914 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.542941 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:25Z","lastTransitionTime":"2025-11-24T08:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.595441 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:25 crc kubenswrapper[4718]: E1124 08:36:25.595707 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.646372 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.646433 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.646445 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.646464 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.646477 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:25Z","lastTransitionTime":"2025-11-24T08:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.750693 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.750752 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.750764 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.750789 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.750801 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:25Z","lastTransitionTime":"2025-11-24T08:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.853721 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.853805 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.853817 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.853928 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.853945 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:25Z","lastTransitionTime":"2025-11-24T08:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.958024 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.958085 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.958099 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.958126 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:25 crc kubenswrapper[4718]: I1124 08:36:25.958143 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:25Z","lastTransitionTime":"2025-11-24T08:36:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.060619 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.060672 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.060682 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.060694 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.060702 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:26Z","lastTransitionTime":"2025-11-24T08:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.163070 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.163120 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.163129 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.163151 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.163162 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:26Z","lastTransitionTime":"2025-11-24T08:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.265367 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.265411 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.265425 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.265449 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.265460 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:26Z","lastTransitionTime":"2025-11-24T08:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.368856 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.368932 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.368941 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.368958 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.368997 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:26Z","lastTransitionTime":"2025-11-24T08:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.471612 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.471652 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.471663 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.471681 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.471692 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:26Z","lastTransitionTime":"2025-11-24T08:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.574131 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.574205 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.574227 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.574259 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.574282 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:26Z","lastTransitionTime":"2025-11-24T08:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.595415 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.595427 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.595498 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:26 crc kubenswrapper[4718]: E1124 08:36:26.595591 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:26 crc kubenswrapper[4718]: E1124 08:36:26.595797 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:26 crc kubenswrapper[4718]: E1124 08:36:26.595878 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.676211 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.676253 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.676265 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.676283 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.676302 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:26Z","lastTransitionTime":"2025-11-24T08:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.778604 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.778644 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.778653 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.778668 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.778680 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:26Z","lastTransitionTime":"2025-11-24T08:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.881222 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.881252 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.881259 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.881271 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.881280 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:26Z","lastTransitionTime":"2025-11-24T08:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.983740 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.983786 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.983796 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.983815 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:26 crc kubenswrapper[4718]: I1124 08:36:26.983827 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:26Z","lastTransitionTime":"2025-11-24T08:36:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.086116 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.086158 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.086169 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.086185 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.086195 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:27Z","lastTransitionTime":"2025-11-24T08:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.188218 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.188259 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.188270 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.188284 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.188295 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:27Z","lastTransitionTime":"2025-11-24T08:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.290402 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.290665 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.290768 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.290904 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.291000 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:27Z","lastTransitionTime":"2025-11-24T08:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.393370 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.393410 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.393418 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.393432 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.393444 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:27Z","lastTransitionTime":"2025-11-24T08:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.495914 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.495950 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.495958 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.495992 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.496001 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:27Z","lastTransitionTime":"2025-11-24T08:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.596030 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:27 crc kubenswrapper[4718]: E1124 08:36:27.596765 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.598350 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.598377 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.598385 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.598398 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.598406 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:27Z","lastTransitionTime":"2025-11-24T08:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.703386 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.703442 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.703455 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.703473 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.703486 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:27Z","lastTransitionTime":"2025-11-24T08:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.805684 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.805741 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.805755 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.805771 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.805780 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:27Z","lastTransitionTime":"2025-11-24T08:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.908391 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.908436 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.908444 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.908459 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:27 crc kubenswrapper[4718]: I1124 08:36:27.908468 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:27Z","lastTransitionTime":"2025-11-24T08:36:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.010616 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.010658 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.010666 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.010680 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.010691 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:28Z","lastTransitionTime":"2025-11-24T08:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.113433 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.113476 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.113487 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.113506 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.113517 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:28Z","lastTransitionTime":"2025-11-24T08:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.216295 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.216328 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.216339 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.216352 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.216361 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:28Z","lastTransitionTime":"2025-11-24T08:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.319010 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.319043 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.319052 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.319064 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.319073 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:28Z","lastTransitionTime":"2025-11-24T08:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.421464 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.421521 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.421530 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.421551 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.421562 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:28Z","lastTransitionTime":"2025-11-24T08:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.525633 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.525686 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.525697 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.525711 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.525721 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:28Z","lastTransitionTime":"2025-11-24T08:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.595646 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.595725 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.596307 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:28 crc kubenswrapper[4718]: E1124 08:36:28.596435 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:28 crc kubenswrapper[4718]: E1124 08:36:28.596601 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:28 crc kubenswrapper[4718]: E1124 08:36:28.596824 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.609247 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba10360-c090-4095-8af1-5956450d250d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2447b7b95e023d5250d7f7f92372712537ac3698312da71d6c6c70b23bccf1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb96b198cd151b3cd4336ecbb27650bb3104c4c68d815ec5b911ee0aca16da2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-2
4T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946f83395434468ac3f7b67789108982e4341984166b966312fef9bcb1a9d48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.621689 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.628919 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.628960 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.629060 4718 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.629082 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.629093 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:28Z","lastTransitionTime":"2025-11-24T08:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.633438 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.645223 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.657873 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.676417 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.695504 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.712811 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.723824 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.731165 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.731196 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.731209 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.731224 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.731235 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:28Z","lastTransitionTime":"2025-11-24T08:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.732887 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.742758 4718 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.754907 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.765564 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.774416 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.790158 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e09320c3d2eda09a489164b4647a73781c3c13
9a1421b9de9e38d9d97e3e85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:19Z\\\",\\\"message\\\":\\\"l-plane-749d76644c-l6tzj\\\\nI1124 08:36:19.363957 6422 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj in node crc\\\\nI1124 08:36:19.363946 6422 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:19.363994 6422 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj after 0 failed attempt(s)\\\\nI1124 08:36:19.363568 6422 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-qbwmc after 0 failed attempt(s)\\\\nI1124 08:36:19.364004 6422 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj\\\\nI1124 08:36:19.364011 6422 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-qbwmc\\\\nF1124 08:36:19.363476 6422 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:36:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.800234 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.811162 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.823004 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:28Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.833549 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.833712 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.833819 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.833919 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.834030 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:28Z","lastTransitionTime":"2025-11-24T08:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.936239 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.936272 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.936280 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.936293 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:28 crc kubenswrapper[4718]: I1124 08:36:28.936302 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:28Z","lastTransitionTime":"2025-11-24T08:36:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.037929 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.038050 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.038065 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.038077 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.038088 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:29Z","lastTransitionTime":"2025-11-24T08:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.140654 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.140685 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.140694 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.140706 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.140716 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:29Z","lastTransitionTime":"2025-11-24T08:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.242946 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.243012 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.243021 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.243034 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.243043 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:29Z","lastTransitionTime":"2025-11-24T08:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.345250 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.345297 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.345307 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.345321 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.345335 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:29Z","lastTransitionTime":"2025-11-24T08:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.447727 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.447765 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.447777 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.447792 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.447805 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:29Z","lastTransitionTime":"2025-11-24T08:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.550373 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.550416 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.550428 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.550446 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.550457 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:29Z","lastTransitionTime":"2025-11-24T08:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.596114 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:29 crc kubenswrapper[4718]: E1124 08:36:29.596256 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.653383 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.653437 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.653452 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.653469 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.653483 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:29Z","lastTransitionTime":"2025-11-24T08:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.756532 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.756571 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.756580 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.756592 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.756601 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:29Z","lastTransitionTime":"2025-11-24T08:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.858674 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.858730 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.858739 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.858752 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.858760 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:29Z","lastTransitionTime":"2025-11-24T08:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.961430 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.961481 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.961581 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.961598 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:29 crc kubenswrapper[4718]: I1124 08:36:29.961609 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:29Z","lastTransitionTime":"2025-11-24T08:36:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.063832 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.063869 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.063878 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.063890 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.063901 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:30Z","lastTransitionTime":"2025-11-24T08:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.166341 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.166429 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.166439 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.166453 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.166466 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:30Z","lastTransitionTime":"2025-11-24T08:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.268932 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.269024 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.269034 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.269049 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.269059 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:30Z","lastTransitionTime":"2025-11-24T08:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.371987 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.372039 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.372050 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.372066 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.372077 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:30Z","lastTransitionTime":"2025-11-24T08:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.475251 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.475310 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.475324 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.475344 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.475356 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:30Z","lastTransitionTime":"2025-11-24T08:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.577441 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.577471 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.577479 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.577492 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.577502 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:30Z","lastTransitionTime":"2025-11-24T08:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.595765 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:30 crc kubenswrapper[4718]: E1124 08:36:30.595902 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.596173 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.596287 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:30 crc kubenswrapper[4718]: E1124 08:36:30.596455 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:30 crc kubenswrapper[4718]: E1124 08:36:30.596501 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.679555 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.679595 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.679606 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.679622 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.679632 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:30Z","lastTransitionTime":"2025-11-24T08:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.781606 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.781635 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.781642 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.781657 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.781666 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:30Z","lastTransitionTime":"2025-11-24T08:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.884385 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.884416 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.884427 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.884438 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.884448 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:30Z","lastTransitionTime":"2025-11-24T08:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.986701 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.986742 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.986753 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.986770 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:30 crc kubenswrapper[4718]: I1124 08:36:30.986779 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:30Z","lastTransitionTime":"2025-11-24T08:36:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.089243 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.089279 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.089288 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.089303 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.089313 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:31Z","lastTransitionTime":"2025-11-24T08:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.191625 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.191660 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.191668 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.191679 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.191687 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:31Z","lastTransitionTime":"2025-11-24T08:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.293856 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.293908 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.293916 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.293930 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.293939 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:31Z","lastTransitionTime":"2025-11-24T08:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.396205 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.396244 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.396260 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.396283 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.396297 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:31Z","lastTransitionTime":"2025-11-24T08:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.500198 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.500260 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.500270 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.500285 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.500295 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:31Z","lastTransitionTime":"2025-11-24T08:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.596383 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:31 crc kubenswrapper[4718]: E1124 08:36:31.596558 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.603080 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.603118 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.603127 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.603141 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.603150 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:31Z","lastTransitionTime":"2025-11-24T08:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.705818 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.705850 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.705859 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.705872 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.705881 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:31Z","lastTransitionTime":"2025-11-24T08:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.808402 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.808450 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.808463 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.808482 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.808499 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:31Z","lastTransitionTime":"2025-11-24T08:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.911492 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.911543 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.911552 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.911566 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:31 crc kubenswrapper[4718]: I1124 08:36:31.911576 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:31Z","lastTransitionTime":"2025-11-24T08:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.013746 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.014049 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.014185 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.014266 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.014327 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:32Z","lastTransitionTime":"2025-11-24T08:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.116739 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.116997 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.117139 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.117251 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.117347 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:32Z","lastTransitionTime":"2025-11-24T08:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.219961 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.220019 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.220029 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.220043 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.220053 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:32Z","lastTransitionTime":"2025-11-24T08:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.322446 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.322711 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.322811 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.322909 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.323011 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:32Z","lastTransitionTime":"2025-11-24T08:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.425723 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.426028 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.426119 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.426214 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.426317 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:32Z","lastTransitionTime":"2025-11-24T08:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.530101 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.530146 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.530156 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.530172 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.530184 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:32Z","lastTransitionTime":"2025-11-24T08:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.595530 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.595562 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:32 crc kubenswrapper[4718]: E1124 08:36:32.595681 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.596065 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:32 crc kubenswrapper[4718]: E1124 08:36:32.596133 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:32 crc kubenswrapper[4718]: E1124 08:36:32.596204 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.596308 4718 scope.go:117] "RemoveContainer" containerID="d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85" Nov 24 08:36:32 crc kubenswrapper[4718]: E1124 08:36:32.596568 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.632820 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.632859 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.632873 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.632890 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.632903 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:32Z","lastTransitionTime":"2025-11-24T08:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.735964 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.736037 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.736050 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.736072 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.736083 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:32Z","lastTransitionTime":"2025-11-24T08:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.838499 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.838540 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.838549 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.838563 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.838572 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:32Z","lastTransitionTime":"2025-11-24T08:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.941100 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.941153 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.941165 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.941183 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:32 crc kubenswrapper[4718]: I1124 08:36:32.941194 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:32Z","lastTransitionTime":"2025-11-24T08:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.043655 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.043697 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.043707 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.043723 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.043735 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:33Z","lastTransitionTime":"2025-11-24T08:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.146105 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.146146 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.146155 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.146169 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.146179 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:33Z","lastTransitionTime":"2025-11-24T08:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.248569 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.248616 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.248627 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.248645 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.248656 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:33Z","lastTransitionTime":"2025-11-24T08:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.351304 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.351336 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.351346 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.351358 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.351368 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:33Z","lastTransitionTime":"2025-11-24T08:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.453035 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.453075 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.453086 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.453098 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.453109 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:33Z","lastTransitionTime":"2025-11-24T08:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.555351 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.555384 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.555393 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.555405 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.555414 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:33Z","lastTransitionTime":"2025-11-24T08:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.595904 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:33 crc kubenswrapper[4718]: E1124 08:36:33.596419 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.657873 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.657950 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.657961 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.657994 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.658005 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:33Z","lastTransitionTime":"2025-11-24T08:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.760368 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.760399 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.760409 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.760424 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.760450 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:33Z","lastTransitionTime":"2025-11-24T08:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.862722 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.863437 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.863473 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.863497 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.863508 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:33Z","lastTransitionTime":"2025-11-24T08:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.966195 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.966247 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.966262 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.966277 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:33 crc kubenswrapper[4718]: I1124 08:36:33.966287 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:33Z","lastTransitionTime":"2025-11-24T08:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.068196 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.068234 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.068244 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.068257 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.068268 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:34Z","lastTransitionTime":"2025-11-24T08:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.168464 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs\") pod \"network-metrics-daemon-ctdmz\" (UID: \"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\") " pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:34 crc kubenswrapper[4718]: E1124 08:36:34.168588 4718 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 08:36:34 crc kubenswrapper[4718]: E1124 08:36:34.168650 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs podName:3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97 nodeName:}" failed. No retries permitted until 2025-11-24 08:37:06.168631911 +0000 UTC m=+98.284922815 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs") pod "network-metrics-daemon-ctdmz" (UID: "3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.169746 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.169768 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.169776 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.169790 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.169798 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:34Z","lastTransitionTime":"2025-11-24T08:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.271397 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.271437 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.271455 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.271473 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.271484 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:34Z","lastTransitionTime":"2025-11-24T08:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.373869 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.373921 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.373934 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.373956 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.373984 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:34Z","lastTransitionTime":"2025-11-24T08:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.476365 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.476429 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.476439 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.476456 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.476466 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:34Z","lastTransitionTime":"2025-11-24T08:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.578213 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.578245 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.578254 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.578266 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.578275 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:34Z","lastTransitionTime":"2025-11-24T08:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.596226 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.596283 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.596283 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:34 crc kubenswrapper[4718]: E1124 08:36:34.596379 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:34 crc kubenswrapper[4718]: E1124 08:36:34.596484 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:34 crc kubenswrapper[4718]: E1124 08:36:34.596561 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.680374 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.680437 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.680450 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.680464 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.680475 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:34Z","lastTransitionTime":"2025-11-24T08:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.783310 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.783350 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.783362 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.783381 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.783392 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:34Z","lastTransitionTime":"2025-11-24T08:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.886131 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.886427 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.886507 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.886593 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.886657 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:34Z","lastTransitionTime":"2025-11-24T08:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.989295 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.989347 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.989357 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.989370 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:34 crc kubenswrapper[4718]: I1124 08:36:34.989379 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:34Z","lastTransitionTime":"2025-11-24T08:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.091708 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.091787 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.091801 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.091823 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.091838 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:35Z","lastTransitionTime":"2025-11-24T08:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.193838 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.193872 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.193884 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.193901 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.193912 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:35Z","lastTransitionTime":"2025-11-24T08:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.283594 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.283630 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.283641 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.283655 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.283664 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:35Z","lastTransitionTime":"2025-11-24T08:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:35 crc kubenswrapper[4718]: E1124 08:36:35.297078 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:35Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.301430 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.301542 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.301559 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.301602 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.301624 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:35Z","lastTransitionTime":"2025-11-24T08:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:35 crc kubenswrapper[4718]: E1124 08:36:35.315632 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:35Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.318481 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.318504 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.318512 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.318524 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.318533 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:35Z","lastTransitionTime":"2025-11-24T08:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:35 crc kubenswrapper[4718]: E1124 08:36:35.329378 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:35Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.333121 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.333162 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.333174 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.333190 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.333200 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:35Z","lastTransitionTime":"2025-11-24T08:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:35 crc kubenswrapper[4718]: E1124 08:36:35.344806 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:35Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.348516 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.348546 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.348558 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.348574 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.348586 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:35Z","lastTransitionTime":"2025-11-24T08:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:35 crc kubenswrapper[4718]: E1124 08:36:35.360579 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:35Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:35 crc kubenswrapper[4718]: E1124 08:36:35.360731 4718 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.362453 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.362491 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.362504 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.362521 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.362535 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:35Z","lastTransitionTime":"2025-11-24T08:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.464771 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.465476 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.465504 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.465530 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.465546 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:35Z","lastTransitionTime":"2025-11-24T08:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.567792 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.567844 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.567854 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.567870 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.567882 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:35Z","lastTransitionTime":"2025-11-24T08:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.595857 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:35 crc kubenswrapper[4718]: E1124 08:36:35.596012 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.670029 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.670322 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.670416 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.670504 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.670588 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:35Z","lastTransitionTime":"2025-11-24T08:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.773414 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.773453 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.773464 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.773480 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.773492 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:35Z","lastTransitionTime":"2025-11-24T08:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.875712 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.876303 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.876378 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.876457 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.876529 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:35Z","lastTransitionTime":"2025-11-24T08:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.979125 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.979154 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.979162 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.979174 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:35 crc kubenswrapper[4718]: I1124 08:36:35.979184 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:35Z","lastTransitionTime":"2025-11-24T08:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.081844 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.081895 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.081907 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.081926 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.081938 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:36Z","lastTransitionTime":"2025-11-24T08:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.184601 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.184934 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.185061 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.185171 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.185256 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:36Z","lastTransitionTime":"2025-11-24T08:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.287726 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.288024 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.288115 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.288212 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.288349 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:36Z","lastTransitionTime":"2025-11-24T08:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.391249 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.391289 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.391298 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.391311 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.391324 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:36Z","lastTransitionTime":"2025-11-24T08:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.493606 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.493658 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.493670 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.493690 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.493703 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:36Z","lastTransitionTime":"2025-11-24T08:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.595625 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.595675 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:36 crc kubenswrapper[4718]: E1124 08:36:36.595773 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.595828 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:36 crc kubenswrapper[4718]: E1124 08:36:36.595939 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:36 crc kubenswrapper[4718]: E1124 08:36:36.596087 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.596687 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.596735 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.596745 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.596758 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.596768 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:36Z","lastTransitionTime":"2025-11-24T08:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.699126 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.699220 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.699234 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.699250 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.699261 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:36Z","lastTransitionTime":"2025-11-24T08:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.801645 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.801695 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.801708 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.801727 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.801743 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:36Z","lastTransitionTime":"2025-11-24T08:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.904219 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.904275 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.904287 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.904305 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:36 crc kubenswrapper[4718]: I1124 08:36:36.904321 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:36Z","lastTransitionTime":"2025-11-24T08:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.006536 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.006564 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.006572 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.006585 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.006597 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:37Z","lastTransitionTime":"2025-11-24T08:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.108403 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.108446 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.108457 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.108474 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.108484 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:37Z","lastTransitionTime":"2025-11-24T08:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.211095 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.211139 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.211153 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.211171 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.211181 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:37Z","lastTransitionTime":"2025-11-24T08:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.313383 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.313432 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.313440 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.313453 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.313462 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:37Z","lastTransitionTime":"2025-11-24T08:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.416657 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.416951 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.417085 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.417190 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.417272 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:37Z","lastTransitionTime":"2025-11-24T08:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.519689 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.519812 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.519834 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.519861 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.519878 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:37Z","lastTransitionTime":"2025-11-24T08:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.596224 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:37 crc kubenswrapper[4718]: E1124 08:36:37.596441 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.622235 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.622298 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.622310 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.622329 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.622341 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:37Z","lastTransitionTime":"2025-11-24T08:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.724414 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.724456 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.724467 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.724483 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.724494 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:37Z","lastTransitionTime":"2025-11-24T08:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.827009 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.827054 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.827064 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.827079 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.827094 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:37Z","lastTransitionTime":"2025-11-24T08:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.930183 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.930235 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.930246 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.930260 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:37 crc kubenswrapper[4718]: I1124 08:36:37.930273 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:37Z","lastTransitionTime":"2025-11-24T08:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.032781 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.032820 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.032830 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.032842 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.032852 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:38Z","lastTransitionTime":"2025-11-24T08:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.135082 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.135119 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.135128 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.135142 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.135152 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:38Z","lastTransitionTime":"2025-11-24T08:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.237781 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.237822 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.237840 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.237860 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.237871 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:38Z","lastTransitionTime":"2025-11-24T08:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.340713 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.341203 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.341216 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.341233 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.341249 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:38Z","lastTransitionTime":"2025-11-24T08:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.445810 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.445857 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.445867 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.445882 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.445893 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:38Z","lastTransitionTime":"2025-11-24T08:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.548666 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.548717 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.548728 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.548743 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.548752 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:38Z","lastTransitionTime":"2025-11-24T08:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.595473 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.595493 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:38 crc kubenswrapper[4718]: E1124 08:36:38.595677 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.595493 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:38 crc kubenswrapper[4718]: E1124 08:36:38.595811 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:38 crc kubenswrapper[4718]: E1124 08:36:38.595887 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.608834 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.620147 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.632253 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.643820 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba10360-c090-4095-8af1-5956450d250d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2447b7b95e023d5250d7f7f92372712537ac3698312da71d6c6c70b23bccf1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb96b198cd151b3cd4336ecbb27650bb3104c4c68d815ec5b911ee0aca16da2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35
:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946f83395434468ac3f7b67789108982e4341984166b966312fef9bcb1a9d48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.651422 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.651462 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.651473 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.651489 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.651501 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:38Z","lastTransitionTime":"2025-11-24T08:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.656435 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.669288 4718 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.679643 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.694042 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.712665 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.725490 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.754727 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.754778 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.754792 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.754817 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.754835 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:38Z","lastTransitionTime":"2025-11-24T08:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.756380 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.794636 4718 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.809505 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.820966 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.832129 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.850087 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e09320c3d2eda09a489164b4647a73781c3c13
9a1421b9de9e38d9d97e3e85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:19Z\\\",\\\"message\\\":\\\"l-plane-749d76644c-l6tzj\\\\nI1124 08:36:19.363957 6422 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj in node crc\\\\nI1124 08:36:19.363946 6422 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:19.363994 6422 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj after 0 failed attempt(s)\\\\nI1124 08:36:19.363568 6422 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-qbwmc after 0 failed attempt(s)\\\\nI1124 08:36:19.364004 6422 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj\\\\nI1124 08:36:19.364011 6422 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-qbwmc\\\\nF1124 08:36:19.363476 6422 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:36:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.858127 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.858176 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.858190 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.858206 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.858218 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:38Z","lastTransitionTime":"2025-11-24T08:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.861563 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.872248 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:38Z is after 2025-08-24T17:21:41Z" Nov 24 
08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.961402 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.961473 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.961488 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.961723 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:38 crc kubenswrapper[4718]: I1124 08:36:38.961743 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:38Z","lastTransitionTime":"2025-11-24T08:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.027391 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zvlvh_811ba3ee-aad5-427c-84f7-fbd3b78255ec/kube-multus/0.log" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.027436 4718 generic.go:334] "Generic (PLEG): container finished" podID="811ba3ee-aad5-427c-84f7-fbd3b78255ec" containerID="1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342" exitCode=1 Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.027468 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zvlvh" event={"ID":"811ba3ee-aad5-427c-84f7-fbd3b78255ec","Type":"ContainerDied","Data":"1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342"} Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.027844 4718 scope.go:117] "RemoveContainer" containerID="1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.042565 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.055556 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.065031 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.065071 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.065082 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.065104 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.065117 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:39Z","lastTransitionTime":"2025-11-24T08:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.066465 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.084075 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:19Z\\\",\\\"message\\\":\\\"l-plane-749d76644c-l6tzj\\\\nI1124 08:36:19.363957 6422 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj in node crc\\\\nI1124 08:36:19.363946 6422 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:19.363994 6422 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj after 0 failed attempt(s)\\\\nI1124 08:36:19.363568 6422 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-qbwmc after 0 failed attempt(s)\\\\nI1124 08:36:19.364004 6422 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj\\\\nI1124 08:36:19.364011 6422 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-qbwmc\\\\nF1124 08:36:19.363476 6422 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:36:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.094828 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.105712 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 
08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.118710 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.131099 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.142717 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.153650 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba10360-c090-4095-8af1-5956450d250d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2447b7b95e023d5250d7f7f92372712537ac3698312da71d6c6c70b23bccf1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb96b198cd151b3cd4336ecbb27650bb3104c4c68d815ec5b911ee0aca16da2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35
:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946f83395434468ac3f7b67789108982e4341984166b966312fef9bcb1a9d48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.165950 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.168213 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.168247 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.168255 4718 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.168268 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.168277 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:39Z","lastTransitionTime":"2025-11-24T08:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.176585 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.187459 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.199254 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:38Z\\\",\\\"message\\\":\\\"2025-11-24T08:35:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_155f4047-4258-4c3d-b5b5-07f36579dc25\\\\n2025-11-24T08:35:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_155f4047-4258-4c3d-b5b5-07f36579dc25 to /host/opt/cni/bin/\\\\n2025-11-24T08:35:53Z [verbose] multus-daemon started\\\\n2025-11-24T08:35:53Z [verbose] Readiness Indicator file check\\\\n2025-11-24T08:36:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.219773 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.231437 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.247491 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\"
:\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3ae
aaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.259653 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:39Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.270258 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.270301 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.270312 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.270327 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.270336 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:39Z","lastTransitionTime":"2025-11-24T08:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.372758 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.372814 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.372826 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.372843 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.372854 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:39Z","lastTransitionTime":"2025-11-24T08:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.474792 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.474840 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.474848 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.474862 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.474870 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:39Z","lastTransitionTime":"2025-11-24T08:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.576630 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.576704 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.576715 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.576729 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.576742 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:39Z","lastTransitionTime":"2025-11-24T08:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.595955 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:39 crc kubenswrapper[4718]: E1124 08:36:39.596097 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.679482 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.679530 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.679539 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.679553 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.679563 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:39Z","lastTransitionTime":"2025-11-24T08:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.782207 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.782258 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.782268 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.782280 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.782289 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:39Z","lastTransitionTime":"2025-11-24T08:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.884374 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.884410 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.884422 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.884439 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.884451 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:39Z","lastTransitionTime":"2025-11-24T08:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.987009 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.987056 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.987069 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.987085 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:39 crc kubenswrapper[4718]: I1124 08:36:39.987096 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:39Z","lastTransitionTime":"2025-11-24T08:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.033340 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zvlvh_811ba3ee-aad5-427c-84f7-fbd3b78255ec/kube-multus/0.log" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.033394 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zvlvh" event={"ID":"811ba3ee-aad5-427c-84f7-fbd3b78255ec","Type":"ContainerStarted","Data":"308a885775330d5e1550c9c740909b7bb1bbff451fa1d5b196af6263de715424"} Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.047684 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.087777 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.089579 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.089619 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:40 crc 
kubenswrapper[4718]: I1124 08:36:40.089628 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.089641 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.089651 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:40Z","lastTransitionTime":"2025-11-24T08:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.098747 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.118349 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.130603 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc5
3b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.141854 4718 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1
c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.152851 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\
\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.172342 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e09320c3d2eda09a489164b4647a73781c3c13
9a1421b9de9e38d9d97e3e85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:19Z\\\",\\\"message\\\":\\\"l-plane-749d76644c-l6tzj\\\\nI1124 08:36:19.363957 6422 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj in node crc\\\\nI1124 08:36:19.363946 6422 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:19.363994 6422 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj after 0 failed attempt(s)\\\\nI1124 08:36:19.363568 6422 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-qbwmc after 0 failed attempt(s)\\\\nI1124 08:36:19.364004 6422 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj\\\\nI1124 08:36:19.364011 6422 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-qbwmc\\\\nF1124 08:36:19.363476 6422 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:36:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.181182 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.190563 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.192099 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.192132 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.192145 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.192160 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.192180 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:40Z","lastTransitionTime":"2025-11-24T08:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.203071 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.215814 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.229450 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.240954 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.251300 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.261432 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.271008 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308a885775330d5e1550c9c740909b7bb1bbff451fa1d5b196af6263de715424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:38Z\\\",\\\"message\\\":\\\"2025-11-24T08:35:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_155f4047-4258-4c3d-b5b5-07f36579dc25\\\\n2025-11-24T08:35:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_155f4047-4258-4c3d-b5b5-07f36579dc25 to /host/opt/cni/bin/\\\\n2025-11-24T08:35:53Z [verbose] multus-daemon started\\\\n2025-11-24T08:35:53Z [verbose] Readiness Indicator file check\\\\n2025-11-24T08:36:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.279681 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba10360-c090-4095-8af1-5956450d250d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2447b7b95e023d5250d7f7f92372712537ac3698312da71d6c6c70b23bccf1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb96b198cd151b3cd4336ecbb27650bb3104c4c68d815ec5b911ee0aca16da2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946f83395434468ac3f7b67789108982e4341984166b966312fef9bcb1a9d48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:40Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.294388 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.294421 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.294431 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.294443 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.294452 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:40Z","lastTransitionTime":"2025-11-24T08:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.397118 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.397158 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.397166 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.397180 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.397189 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:40Z","lastTransitionTime":"2025-11-24T08:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.499502 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.499551 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.499561 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.499573 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.499582 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:40Z","lastTransitionTime":"2025-11-24T08:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.596809 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.596814 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.597039 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:40 crc kubenswrapper[4718]: E1124 08:36:40.597114 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:40 crc kubenswrapper[4718]: E1124 08:36:40.596957 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:40 crc kubenswrapper[4718]: E1124 08:36:40.597223 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.601060 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.601091 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.601102 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.601117 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.601127 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:40Z","lastTransitionTime":"2025-11-24T08:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.703159 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.703207 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.703218 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.703238 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.703250 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:40Z","lastTransitionTime":"2025-11-24T08:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.804883 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.804927 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.804941 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.804957 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.804988 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:40Z","lastTransitionTime":"2025-11-24T08:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.907823 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.907867 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.907875 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.907890 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:40 crc kubenswrapper[4718]: I1124 08:36:40.907900 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:40Z","lastTransitionTime":"2025-11-24T08:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.010628 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.010675 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.010687 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.010702 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.011065 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:41Z","lastTransitionTime":"2025-11-24T08:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.113237 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.113280 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.113288 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.113300 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.113309 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:41Z","lastTransitionTime":"2025-11-24T08:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.215189 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.215230 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.215242 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.215258 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.215269 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:41Z","lastTransitionTime":"2025-11-24T08:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.317511 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.317560 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.317569 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.317584 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.317593 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:41Z","lastTransitionTime":"2025-11-24T08:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.419886 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.419941 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.419951 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.419963 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.420001 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:41Z","lastTransitionTime":"2025-11-24T08:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.522474 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.522530 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.522540 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.522555 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.522582 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:41Z","lastTransitionTime":"2025-11-24T08:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.595346 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:41 crc kubenswrapper[4718]: E1124 08:36:41.595527 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.624923 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.624962 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.624991 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.625008 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.625020 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:41Z","lastTransitionTime":"2025-11-24T08:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.727167 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.727223 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.727235 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.727252 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.727266 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:41Z","lastTransitionTime":"2025-11-24T08:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.829209 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.829259 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.829289 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.829306 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.829317 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:41Z","lastTransitionTime":"2025-11-24T08:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.931982 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.932027 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.932035 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.932049 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:41 crc kubenswrapper[4718]: I1124 08:36:41.932059 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:41Z","lastTransitionTime":"2025-11-24T08:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.034635 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.034696 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.034708 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.034727 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.034740 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:42Z","lastTransitionTime":"2025-11-24T08:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.137473 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.137539 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.137559 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.137578 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.137590 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:42Z","lastTransitionTime":"2025-11-24T08:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.239659 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.239701 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.239709 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.239721 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.239730 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:42Z","lastTransitionTime":"2025-11-24T08:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.342045 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.342085 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.342094 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.342107 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.342118 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:42Z","lastTransitionTime":"2025-11-24T08:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.444758 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.444811 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.444822 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.444839 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.444851 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:42Z","lastTransitionTime":"2025-11-24T08:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.547163 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.547229 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.547243 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.547260 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.547272 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:42Z","lastTransitionTime":"2025-11-24T08:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.596089 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.596135 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:42 crc kubenswrapper[4718]: E1124 08:36:42.596242 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.596321 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:42 crc kubenswrapper[4718]: E1124 08:36:42.596442 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:42 crc kubenswrapper[4718]: E1124 08:36:42.596635 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.608007 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.649555 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.649598 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.649606 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.649620 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.649629 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:42Z","lastTransitionTime":"2025-11-24T08:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.752096 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.752150 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.752158 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.752171 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.752180 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:42Z","lastTransitionTime":"2025-11-24T08:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.854518 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.854556 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.854564 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.854577 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.854586 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:42Z","lastTransitionTime":"2025-11-24T08:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.956918 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.956996 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.957008 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.957026 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:42 crc kubenswrapper[4718]: I1124 08:36:42.957037 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:42Z","lastTransitionTime":"2025-11-24T08:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.058861 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.058897 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.058909 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.058925 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.058936 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:43Z","lastTransitionTime":"2025-11-24T08:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.164217 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.164287 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.164305 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.164320 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.164336 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:43Z","lastTransitionTime":"2025-11-24T08:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.267290 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.267342 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.267352 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.267364 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.267375 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:43Z","lastTransitionTime":"2025-11-24T08:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.369524 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.369557 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.369564 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.369577 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.369585 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:43Z","lastTransitionTime":"2025-11-24T08:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.471880 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.471923 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.471934 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.471949 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.471960 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:43Z","lastTransitionTime":"2025-11-24T08:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.573948 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.574004 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.574015 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.574029 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.574042 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:43Z","lastTransitionTime":"2025-11-24T08:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.595699 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:43 crc kubenswrapper[4718]: E1124 08:36:43.595855 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.596469 4718 scope.go:117] "RemoveContainer" containerID="d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.677089 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.677124 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.677134 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.677147 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.677157 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:43Z","lastTransitionTime":"2025-11-24T08:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.779543 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.779575 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.779584 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.779597 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.779607 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:43Z","lastTransitionTime":"2025-11-24T08:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.882525 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.882581 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.882609 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.882627 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.882659 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:43Z","lastTransitionTime":"2025-11-24T08:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.985464 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.985516 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.985524 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.985538 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:43 crc kubenswrapper[4718]: I1124 08:36:43.985546 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:43Z","lastTransitionTime":"2025-11-24T08:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.050306 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovnkube-controller/2.log" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.052309 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerStarted","Data":"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622"} Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.053254 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.063228 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 
08:36:44.073914 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.083581 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"501ab93e-fd17-48be-a56f-595940aa0fd2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59a58ada5e1c2a88df71a98a5d52c0b32aa00c68b701b5cd1711d39894553754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545bc14d41432ab68f798e54e78640e19d1fc6186a1710e171eb309b504f3e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://545bc14d41432ab68f798e54e78640e19d1fc6186a1710e171eb309b504f3e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or 
is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.088017 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.088053 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.088062 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.088076 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.088086 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:44Z","lastTransitionTime":"2025-11-24T08:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.096429 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-k
ube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.109038 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.119353 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.136196 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14047c6f6c5b355ed402b7d3c9bf7b3aea877205
a1bd1f8bed40383ae152b622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:19Z\\\",\\\"message\\\":\\\"l-plane-749d76644c-l6tzj\\\\nI1124 08:36:19.363957 6422 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj in node crc\\\\nI1124 08:36:19.363946 6422 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:19.363994 6422 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj after 0 failed attempt(s)\\\\nI1124 08:36:19.363568 6422 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-qbwmc after 0 failed attempt(s)\\\\nI1124 08:36:19.364004 6422 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj\\\\nI1124 08:36:19.364011 6422 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-qbwmc\\\\nF1124 08:36:19.363476 6422 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:36:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.150470 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.164860 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.178099 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.190683 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.190724 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.190734 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.190747 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.190756 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:44Z","lastTransitionTime":"2025-11-24T08:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.191835 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba10360-c090-4095-8af1-5956450d250d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2447b7b95e023d5250d7f7f92372712537ac3698312da71d6c6c70b23bccf1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb96b198cd151b3cd4336ecbb27650bb3104c4c68d815ec5b911ee0aca16da2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946f83395434468ac3f7b67789108982e4341984166b966312fef9bcb1a9d48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.204988 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919
d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.216085 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc 
kubenswrapper[4718]: I1124 08:36:44.225210 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.236943 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308a885775330d5e1550c9c740909b7bb1bbff451fa1d5b196af6263de715424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:38Z\\\",\\\"message\\\":\\\"2025-11-24T08:35:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_155f4047-4258-4c3d-b5b5-07f36579dc25\\\\n2025-11-24T08:35:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_155f4047-4258-4c3d-b5b5-07f36579dc25 to /host/opt/cni/bin/\\\\n2025-11-24T08:35:53Z [verbose] multus-daemon started\\\\n2025-11-24T08:35:53Z [verbose] Readiness Indicator file check\\\\n2025-11-24T08:36:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.253302 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.264652 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.277049 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\"
:\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3ae
aaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.286501 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.293069 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.293099 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.293108 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.293124 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.293134 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:44Z","lastTransitionTime":"2025-11-24T08:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.395604 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.395651 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.395662 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.395676 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.395686 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:44Z","lastTransitionTime":"2025-11-24T08:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.498535 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.498580 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.498589 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.498602 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.498611 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:44Z","lastTransitionTime":"2025-11-24T08:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.595298 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.595331 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.595299 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:44 crc kubenswrapper[4718]: E1124 08:36:44.595444 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:44 crc kubenswrapper[4718]: E1124 08:36:44.595577 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:44 crc kubenswrapper[4718]: E1124 08:36:44.595619 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.600383 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.600415 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.600430 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.600443 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.600453 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:44Z","lastTransitionTime":"2025-11-24T08:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.703611 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.703678 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.703720 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.703778 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.703794 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:44Z","lastTransitionTime":"2025-11-24T08:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.806017 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.806066 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.806076 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.806089 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.806103 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:44Z","lastTransitionTime":"2025-11-24T08:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.908369 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.908409 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.908419 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.908434 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:44 crc kubenswrapper[4718]: I1124 08:36:44.908447 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:44Z","lastTransitionTime":"2025-11-24T08:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.010541 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.010778 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.010845 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.010906 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.010984 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:45Z","lastTransitionTime":"2025-11-24T08:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.056455 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovnkube-controller/3.log" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.056924 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovnkube-controller/2.log" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.058875 4718 generic.go:334] "Generic (PLEG): container finished" podID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerID="14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622" exitCode=1 Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.058910 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerDied","Data":"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622"} Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.058940 4718 scope.go:117] "RemoveContainer" containerID="d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.059622 4718 scope.go:117] "RemoveContainer" containerID="14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622" Nov 24 08:36:45 crc kubenswrapper[4718]: E1124 08:36:45.059761 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.073378 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba10360-c090-4095-8af1-5956450d250d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2447b7b95e023d5250d7f7f92372712537ac3698312da71d6c6c70b23bccf1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb96b198cd151b3cd4336ecbb27650bb3104c4c68d815ec5b911ee0aca16da2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946f83395434468ac3f7b67789108982e4341984166b966312fef9bcb1a9d48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.085566 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.098120 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.108383 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.113355 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.113404 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.113417 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.113433 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.113447 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:45Z","lastTransitionTime":"2025-11-24T08:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.120250 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308a885775330d5e1550c9c740909b7bb1bbff451fa1d5b196af6263de715424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:38Z\\\",\\\"message\\\":\\\"2025-11-24T08:35:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_155f4047-4258-4c3d-b5b5-07f36579dc25\\\\n2025-11-24T08:35:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_155f4047-4258-4c3d-b5b5-07f36579dc25 to /host/opt/cni/bin/\\\\n2025-11-24T08:35:53Z [verbose] multus-daemon started\\\\n2025-11-24T08:35:53Z [verbose] Readiness Indicator file check\\\\n2025-11-24T08:36:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.137783 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.149031 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.161084 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\"
:\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3ae
aaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.171405 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.179777 4718 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"501ab93e-fd17-48be-a56f-595940aa0fd2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59a58ada5e1c2a88df71a98a5d52c0b32aa00c68b701b5cd1711d39894553754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545bc14d41432ab68f798e54e78640e19d1fc6186a1710e171eb309b504f3e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://545bc14d41432ab68f798e54e78640e19d1fc6186a1710e171eb309b504f3e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.192529 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.203948 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.214505 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.216609 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.216737 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.216868 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.216982 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.217085 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:45Z","lastTransitionTime":"2025-11-24T08:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.230569 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e09320c3d2eda09a489164b4647a73781c3c139a1421b9de9e38d9d97e3e85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:19Z\\\",\\\"message\\\":\\\"l-plane-749d76644c-l6tzj\\\\nI1124 08:36:19.363957 6422 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj in node crc\\\\nI1124 08:36:19.363946 6422 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 08:36:19.363994 6422 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj after 0 failed attempt(s)\\\\nI1124 08:36:19.363568 6422 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-qbwmc after 0 failed attempt(s)\\\\nI1124 08:36:19.364004 6422 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj\\\\nI1124 08:36:19.364011 6422 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-qbwmc\\\\nF1124 08:36:19.363476 6422 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:36:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:44Z\\\",\\\"message\\\":\\\"org/owner:openshift-service-ca-operator/metrics]} name:Service_openshift-service-ca-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.40:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {2a3fb1a3-a476-4e14-bcf5-fb79af60206a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1124 08:36:44.413961 6772 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z]\\\\nI1124 
08:36:44.4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47
ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.239501 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"19
2.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.248594 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\
":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.259209 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.270453 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.281093 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.319813 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.320145 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.320235 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.320325 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.320403 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:45Z","lastTransitionTime":"2025-11-24T08:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.422925 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.422982 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.422994 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.423010 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.423022 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:45Z","lastTransitionTime":"2025-11-24T08:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.528536 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.528604 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.528648 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.528674 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.528694 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:45Z","lastTransitionTime":"2025-11-24T08:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.595765 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:45 crc kubenswrapper[4718]: E1124 08:36:45.595981 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.615171 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.615223 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.615243 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.615263 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.615275 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:45Z","lastTransitionTime":"2025-11-24T08:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:45 crc kubenswrapper[4718]: E1124 08:36:45.630495 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.634383 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.634426 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.634441 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.634460 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.634471 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:45Z","lastTransitionTime":"2025-11-24T08:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:45 crc kubenswrapper[4718]: E1124 08:36:45.647246 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.651526 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.651562 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.651571 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.651586 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.651596 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:45Z","lastTransitionTime":"2025-11-24T08:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:45 crc kubenswrapper[4718]: E1124 08:36:45.665707 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.670767 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.670811 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.670820 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.670834 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.670848 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:45Z","lastTransitionTime":"2025-11-24T08:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:45 crc kubenswrapper[4718]: E1124 08:36:45.694030 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.698269 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.698308 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.698318 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.698334 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.698345 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:45Z","lastTransitionTime":"2025-11-24T08:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:45 crc kubenswrapper[4718]: E1124 08:36:45.711408 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:45Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:45 crc kubenswrapper[4718]: E1124 08:36:45.711563 4718 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.713407 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.713430 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.713440 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.713453 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.713462 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:45Z","lastTransitionTime":"2025-11-24T08:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.816024 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.816081 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.816094 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.816110 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.816124 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:45Z","lastTransitionTime":"2025-11-24T08:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.918625 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.918663 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.918673 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.918688 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:45 crc kubenswrapper[4718]: I1124 08:36:45.918697 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:45Z","lastTransitionTime":"2025-11-24T08:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.021051 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.021102 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.021116 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.021134 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.021148 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:46Z","lastTransitionTime":"2025-11-24T08:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.063752 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovnkube-controller/3.log" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.067128 4718 scope.go:117] "RemoveContainer" containerID="14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622" Nov 24 08:36:46 crc kubenswrapper[4718]: E1124 08:36:46.067297 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.090994 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.103728 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.116606 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\"
:\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3ae
aaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.123818 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.123879 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.123890 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.123912 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.123925 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:46Z","lastTransitionTime":"2025-11-24T08:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.127476 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.138402 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"501ab93e-fd17-48be-a56f-595940aa0fd2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59a58ada5e1c2a88df71a98a5d52c0b32aa00c68b701b5cd1711d39894553754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545bc14d41432ab68f798e54e78640e19d1fc6186a1710e171eb309b504f3e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://545bc14d41432ab68f798e54e78640e19d1fc6186a1710e171eb309b504f3e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.151522 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.166564 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.178750 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.196922 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14047c6f6c5b355ed402b7d3c9bf7b3aea877205
a1bd1f8bed40383ae152b622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:44Z\\\",\\\"message\\\":\\\"org/owner:openshift-service-ca-operator/metrics]} name:Service_openshift-service-ca-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.40:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {2a3fb1a3-a476-4e14-bcf5-fb79af60206a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1124 08:36:44.413961 6772 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z]\\\\nI1124 08:36:44.4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:36:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.205626 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.214731 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.226152 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.226554 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.226580 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.226592 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.226606 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.226616 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:46Z","lastTransitionTime":"2025-11-24T08:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.237810 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.249484 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.261238 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba10360-c090-4095-8af1-5956450d250d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2447b7b95e023d5250d7f7f92372712537ac3698312da71d6c6c70b23bccf1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb96b198cd151b3cd4336ecbb27650bb3104c4c68d815ec5b911ee0aca16da2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946f83395434468ac3f7b67789108982e4341984166b966312fef9bcb1a9d48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.271798 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.281523 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.291387 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.303102 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308a885775330d5e1550c9c740909b7bb1bbff451fa1d5b196af6263de715424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:38Z\\\",\\\"message\\\":\\\"2025-11-24T08:35:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_155f4047-4258-4c3d-b5b5-07f36579dc25\\\\n2025-11-24T08:35:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_155f4047-4258-4c3d-b5b5-07f36579dc25 to /host/opt/cni/bin/\\\\n2025-11-24T08:35:53Z [verbose] multus-daemon started\\\\n2025-11-24T08:35:53Z [verbose] Readiness Indicator file check\\\\n2025-11-24T08:36:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:46Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.329162 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.329376 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.329447 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.329574 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.329670 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:46Z","lastTransitionTime":"2025-11-24T08:36:46Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.432472 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.432507 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.432518 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.432533 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.432547 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:46Z","lastTransitionTime":"2025-11-24T08:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.535288 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.535321 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.535444 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.535460 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.535786 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:46Z","lastTransitionTime":"2025-11-24T08:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.596231 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.596265 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.596250 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:46 crc kubenswrapper[4718]: E1124 08:36:46.596375 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:46 crc kubenswrapper[4718]: E1124 08:36:46.596411 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:46 crc kubenswrapper[4718]: E1124 08:36:46.596463 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.638837 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.639106 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.639203 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.639279 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.639363 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:46Z","lastTransitionTime":"2025-11-24T08:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.741561 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.741603 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.741614 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.741630 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.741642 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:46Z","lastTransitionTime":"2025-11-24T08:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.844107 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.844171 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.844187 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.844214 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.844234 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:46Z","lastTransitionTime":"2025-11-24T08:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.946171 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.946405 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.946505 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.946581 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:46 crc kubenswrapper[4718]: I1124 08:36:46.946685 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:46Z","lastTransitionTime":"2025-11-24T08:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.048682 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.048721 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.048731 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.048747 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.048759 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:47Z","lastTransitionTime":"2025-11-24T08:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.151422 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.152071 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.152096 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.152119 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.152132 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:47Z","lastTransitionTime":"2025-11-24T08:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.254281 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.254309 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.254319 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.254332 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.254341 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:47Z","lastTransitionTime":"2025-11-24T08:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.356720 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.356758 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.356768 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.356781 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.356790 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:47Z","lastTransitionTime":"2025-11-24T08:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.459139 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.459175 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.459183 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.459196 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.459205 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:47Z","lastTransitionTime":"2025-11-24T08:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.561748 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.561792 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.561802 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.561817 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.561828 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:47Z","lastTransitionTime":"2025-11-24T08:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.595423 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:47 crc kubenswrapper[4718]: E1124 08:36:47.595730 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.663882 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.663926 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.663934 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.663946 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.663955 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:47Z","lastTransitionTime":"2025-11-24T08:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.766566 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.766606 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.766616 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.766630 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.766640 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:47Z","lastTransitionTime":"2025-11-24T08:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.869935 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.870000 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.870019 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.870038 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.870053 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:47Z","lastTransitionTime":"2025-11-24T08:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.973041 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.973105 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.973117 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.973136 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:47 crc kubenswrapper[4718]: I1124 08:36:47.973147 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:47Z","lastTransitionTime":"2025-11-24T08:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.075445 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.075944 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.075955 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.075993 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.076005 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:48Z","lastTransitionTime":"2025-11-24T08:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.178137 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.178186 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.178200 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.178224 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.178238 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:48Z","lastTransitionTime":"2025-11-24T08:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.281193 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.281509 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.281599 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.281695 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.281769 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:48Z","lastTransitionTime":"2025-11-24T08:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.384567 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.385141 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.385235 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.385311 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.385387 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:48Z","lastTransitionTime":"2025-11-24T08:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.488286 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.488363 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.488379 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.488398 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.488435 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:48Z","lastTransitionTime":"2025-11-24T08:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.591354 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.591438 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.591452 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.591472 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.591484 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:48Z","lastTransitionTime":"2025-11-24T08:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.595910 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.595914 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.596074 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:48 crc kubenswrapper[4718]: E1124 08:36:48.596185 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:48 crc kubenswrapper[4718]: E1124 08:36:48.596320 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:48 crc kubenswrapper[4718]: E1124 08:36:48.596424 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.609177 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba10360-c090-4095-8af1-5956450d250d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2447b7b95e023d5250d7f7f92372712537ac3698312da71d6c6c70b23bccf1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb96b198cd151b3cd4336ecbb27650bb3104c4c68d815ec5b911ee0aca16da2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-2
4T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946f83395434468ac3f7b67789108982e4341984166b966312fef9bcb1a9d48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.623261 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.636238 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.646241 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.658679 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308a885775330d5e1550c9c740909b7bb1bbff451fa1d5b196af6263de715424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:38Z\\\",\\\"message\\\":\\\"2025-11-24T08:35:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_155f4047-4258-4c3d-b5b5-07f36579dc25\\\\n2025-11-24T08:35:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_155f4047-4258-4c3d-b5b5-07f36579dc25 to /host/opt/cni/bin/\\\\n2025-11-24T08:35:53Z [verbose] multus-daemon started\\\\n2025-11-24T08:35:53Z [verbose] Readiness Indicator file check\\\\n2025-11-24T08:36:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.679202 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.693453 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.693505 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.693520 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.693540 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.693553 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:48Z","lastTransitionTime":"2025-11-24T08:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.694088 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.711952 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.723778 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.735535 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"501ab93e-fd17-48be-a56f-595940aa0fd2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59a58ada5e1c2a88df71a98a5d52c0b32aa00c68b701b5cd1711d39894553754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545bc14d41432ab68f798e54e78640e19d1fc6186a1710e171eb309b504f3e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://545bc14d41432ab68f798e54e78640e19d1fc6186a1710e171eb309b504f3e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.749541 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.762693 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.777159 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.796402 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.796551 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.796576 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.796600 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.796617 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:48Z","lastTransitionTime":"2025-11-24T08:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.798798 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:44Z\\\",\\\"message\\\":\\\"org/owner:openshift-service-ca-operator/metrics]} name:Service_openshift-service-ca-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.40:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {2a3fb1a3-a476-4e14-bcf5-fb79af60206a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1124 08:36:44.413961 6772 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z]\\\\nI1124 08:36:44.4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:36:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting 
failed container=ovnkube-controller pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.811130 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.830300 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mount
Path\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.844353 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.860357 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.878653 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:48Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.899264 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.899317 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.899328 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.899348 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:48 crc kubenswrapper[4718]: I1124 08:36:48.899361 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:48Z","lastTransitionTime":"2025-11-24T08:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.003140 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.003192 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.003203 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.003223 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.003240 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:49Z","lastTransitionTime":"2025-11-24T08:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.106486 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.106543 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.106555 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.106576 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.106587 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:49Z","lastTransitionTime":"2025-11-24T08:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.210145 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.210228 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.210260 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.210294 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.210323 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:49Z","lastTransitionTime":"2025-11-24T08:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.313824 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.313871 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.313884 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.313907 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.313924 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:49Z","lastTransitionTime":"2025-11-24T08:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.417943 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.418019 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.418033 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.418053 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.418065 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:49Z","lastTransitionTime":"2025-11-24T08:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.522081 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.522142 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.522164 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.522194 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.522216 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:49Z","lastTransitionTime":"2025-11-24T08:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.595533 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:49 crc kubenswrapper[4718]: E1124 08:36:49.595879 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.626872 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.626926 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.626939 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.626996 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.627028 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:49Z","lastTransitionTime":"2025-11-24T08:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.733403 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.733458 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.733477 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.733504 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.733524 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:49Z","lastTransitionTime":"2025-11-24T08:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.836181 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.836272 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.836294 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.836327 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.836348 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:49Z","lastTransitionTime":"2025-11-24T08:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.939411 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.939471 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.939484 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.939510 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:49 crc kubenswrapper[4718]: I1124 08:36:49.939525 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:49Z","lastTransitionTime":"2025-11-24T08:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.042293 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.042333 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.042341 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.042354 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.042363 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:50Z","lastTransitionTime":"2025-11-24T08:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.145407 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.145486 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.145501 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.145520 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.145532 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:50Z","lastTransitionTime":"2025-11-24T08:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.247686 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.248245 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.248326 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.248403 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.248618 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:50Z","lastTransitionTime":"2025-11-24T08:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.351038 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.351086 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.351096 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.351110 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.351121 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:50Z","lastTransitionTime":"2025-11-24T08:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.453013 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.453064 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.453076 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.453092 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.453103 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:50Z","lastTransitionTime":"2025-11-24T08:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.555394 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.555430 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.555441 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.555455 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.555467 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:50Z","lastTransitionTime":"2025-11-24T08:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.595930 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.595938 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:50 crc kubenswrapper[4718]: E1124 08:36:50.596337 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.595992 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:50 crc kubenswrapper[4718]: E1124 08:36:50.596417 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:50 crc kubenswrapper[4718]: E1124 08:36:50.596284 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.658088 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.658148 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.658157 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.658172 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.658183 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:50Z","lastTransitionTime":"2025-11-24T08:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.762989 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.763045 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.763056 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.763074 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.763091 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:50Z","lastTransitionTime":"2025-11-24T08:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.865555 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.865600 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.865611 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.865626 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.865636 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:50Z","lastTransitionTime":"2025-11-24T08:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.969584 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.969619 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.969629 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.969644 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:50 crc kubenswrapper[4718]: I1124 08:36:50.969653 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:50Z","lastTransitionTime":"2025-11-24T08:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.071814 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.071869 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.071885 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.071900 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.071911 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:51Z","lastTransitionTime":"2025-11-24T08:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.174335 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.174376 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.174385 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.174399 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.174413 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:51Z","lastTransitionTime":"2025-11-24T08:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.277207 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.277258 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.277269 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.277322 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.277333 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:51Z","lastTransitionTime":"2025-11-24T08:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.379824 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.379878 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.379889 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.379906 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.379917 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:51Z","lastTransitionTime":"2025-11-24T08:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.483245 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.483303 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.483316 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.483339 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.483350 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:51Z","lastTransitionTime":"2025-11-24T08:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.586551 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.586597 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.586606 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.586625 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.586636 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:51Z","lastTransitionTime":"2025-11-24T08:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.596221 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:51 crc kubenswrapper[4718]: E1124 08:36:51.596467 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.688401 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.688462 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.688472 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.688485 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.688497 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:51Z","lastTransitionTime":"2025-11-24T08:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.791663 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.791725 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.791736 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.791762 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.791780 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:51Z","lastTransitionTime":"2025-11-24T08:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.893750 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.893792 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.893801 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.893847 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.893856 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:51Z","lastTransitionTime":"2025-11-24T08:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.995783 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.995838 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.995848 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.995864 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:51 crc kubenswrapper[4718]: I1124 08:36:51.995874 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:51Z","lastTransitionTime":"2025-11-24T08:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.098460 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.098509 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.098517 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.098531 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.098540 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:52Z","lastTransitionTime":"2025-11-24T08:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.200584 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.200626 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.200636 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.200652 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.200663 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:52Z","lastTransitionTime":"2025-11-24T08:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.303224 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.303262 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.303271 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.303285 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.303294 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:52Z","lastTransitionTime":"2025-11-24T08:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.405718 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.405765 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.405776 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.405793 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.405805 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:52Z","lastTransitionTime":"2025-11-24T08:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.461141 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:36:52 crc kubenswrapper[4718]: E1124 08:36:52.461262 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.461236521 +0000 UTC m=+148.577527415 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.461295 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.461340 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.461362 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.461385 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:52 crc kubenswrapper[4718]: E1124 08:36:52.461454 4718 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 08:36:52 crc kubenswrapper[4718]: E1124 08:36:52.461480 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 08:36:52 crc kubenswrapper[4718]: E1124 
08:36:52.461501 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 08:36:52 crc kubenswrapper[4718]: E1124 08:36:52.461507 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.461493868 +0000 UTC m=+148.577784772 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 08:36:52 crc kubenswrapper[4718]: E1124 08:36:52.461514 4718 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:36:52 crc kubenswrapper[4718]: E1124 08:36:52.461540 4718 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 08:36:52 crc kubenswrapper[4718]: E1124 08:36:52.461548 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.461538429 +0000 UTC m=+148.577829333 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:36:52 crc kubenswrapper[4718]: E1124 08:36:52.461680 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.461612391 +0000 UTC m=+148.577903295 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 08:36:52 crc kubenswrapper[4718]: E1124 08:36:52.461788 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 08:36:52 crc kubenswrapper[4718]: E1124 08:36:52.461810 4718 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 08:36:52 crc kubenswrapper[4718]: E1124 08:36:52.461824 4718 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:36:52 crc kubenswrapper[4718]: E1124 08:36:52.461867 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.461855407 +0000 UTC m=+148.578146311 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.508538 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.508590 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.508603 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.508620 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.508634 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:52Z","lastTransitionTime":"2025-11-24T08:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.595664 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.595719 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.595679 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:52 crc kubenswrapper[4718]: E1124 08:36:52.595802 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:52 crc kubenswrapper[4718]: E1124 08:36:52.595874 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:52 crc kubenswrapper[4718]: E1124 08:36:52.595958 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.610850 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.610900 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.610910 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.610922 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.610932 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:52Z","lastTransitionTime":"2025-11-24T08:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.712877 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.712926 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.712934 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.712949 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.712962 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:52Z","lastTransitionTime":"2025-11-24T08:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.815799 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.815835 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.815844 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.815857 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.815868 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:52Z","lastTransitionTime":"2025-11-24T08:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.919080 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.919181 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.919195 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.919223 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:52 crc kubenswrapper[4718]: I1124 08:36:52.919242 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:52Z","lastTransitionTime":"2025-11-24T08:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.022266 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.022316 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.022328 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.022345 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.022361 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:53Z","lastTransitionTime":"2025-11-24T08:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.125529 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.125578 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.125587 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.125604 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.125616 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:53Z","lastTransitionTime":"2025-11-24T08:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.228818 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.228865 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.228877 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.228894 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.228908 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:53Z","lastTransitionTime":"2025-11-24T08:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.332775 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.332825 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.332836 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.332854 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.332864 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:53Z","lastTransitionTime":"2025-11-24T08:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.436560 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.436618 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.436635 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.436659 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.436675 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:53Z","lastTransitionTime":"2025-11-24T08:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.539873 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.539923 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.539935 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.539950 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.539990 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:53Z","lastTransitionTime":"2025-11-24T08:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.595587 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:53 crc kubenswrapper[4718]: E1124 08:36:53.595761 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.643029 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.643077 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.643085 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.643101 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.643112 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:53Z","lastTransitionTime":"2025-11-24T08:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.747318 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.747378 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.747389 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.747409 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.747433 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:53Z","lastTransitionTime":"2025-11-24T08:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.850393 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.850820 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.850830 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.850843 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.850852 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:53Z","lastTransitionTime":"2025-11-24T08:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.954570 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.954653 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.954673 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.954703 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:53 crc kubenswrapper[4718]: I1124 08:36:53.954726 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:53Z","lastTransitionTime":"2025-11-24T08:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.057902 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.058177 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.058189 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.058245 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.058255 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:54Z","lastTransitionTime":"2025-11-24T08:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.161490 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.161526 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.161536 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.161550 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.161561 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:54Z","lastTransitionTime":"2025-11-24T08:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.264116 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.264174 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.264182 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.264198 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.264225 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:54Z","lastTransitionTime":"2025-11-24T08:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.366654 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.366690 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.366700 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.366711 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.366720 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:54Z","lastTransitionTime":"2025-11-24T08:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.468963 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.469023 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.469032 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.469047 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.469156 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:54Z","lastTransitionTime":"2025-11-24T08:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.571270 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.571334 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.571346 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.571365 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.571377 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:54Z","lastTransitionTime":"2025-11-24T08:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.596123 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:54 crc kubenswrapper[4718]: E1124 08:36:54.596254 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.596425 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:54 crc kubenswrapper[4718]: E1124 08:36:54.596470 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.596653 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:54 crc kubenswrapper[4718]: E1124 08:36:54.596902 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.673817 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.673864 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.673880 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.673900 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.673911 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:54Z","lastTransitionTime":"2025-11-24T08:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.776489 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.776532 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.776539 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.776555 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.776566 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:54Z","lastTransitionTime":"2025-11-24T08:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.879219 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.879263 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.879277 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.879294 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.879305 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:54Z","lastTransitionTime":"2025-11-24T08:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.983161 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.983236 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.983259 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.983294 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:54 crc kubenswrapper[4718]: I1124 08:36:54.983319 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:54Z","lastTransitionTime":"2025-11-24T08:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.086550 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.086620 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.086639 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.086669 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.086689 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:55Z","lastTransitionTime":"2025-11-24T08:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.188573 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.188635 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.188648 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.188664 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.188676 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:55Z","lastTransitionTime":"2025-11-24T08:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.291835 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.291892 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.291905 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.291928 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.291945 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:55Z","lastTransitionTime":"2025-11-24T08:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.394190 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.394227 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.394235 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.394248 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.394259 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:55Z","lastTransitionTime":"2025-11-24T08:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.497882 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.497939 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.497960 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.498025 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.498050 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:55Z","lastTransitionTime":"2025-11-24T08:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.596079 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:55 crc kubenswrapper[4718]: E1124 08:36:55.596311 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.601124 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.601197 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.601212 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.601233 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.601250 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:55Z","lastTransitionTime":"2025-11-24T08:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.705092 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.705623 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.705750 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.705885 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.706027 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:55Z","lastTransitionTime":"2025-11-24T08:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.809822 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.810326 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.810447 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.810565 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.810667 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:55Z","lastTransitionTime":"2025-11-24T08:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.914443 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.914499 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.914511 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.914531 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:55 crc kubenswrapper[4718]: I1124 08:36:55.914548 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:55Z","lastTransitionTime":"2025-11-24T08:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.017793 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.017845 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.017857 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.017877 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.017891 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:56Z","lastTransitionTime":"2025-11-24T08:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.104083 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.104148 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.104163 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.104194 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.104210 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:56Z","lastTransitionTime":"2025-11-24T08:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:56 crc kubenswrapper[4718]: E1124 08:36:56.121718 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.126102 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.126163 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.126177 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.126194 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.126207 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:56Z","lastTransitionTime":"2025-11-24T08:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:56 crc kubenswrapper[4718]: E1124 08:36:56.141130 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.146209 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.146278 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.146291 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.146313 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.146328 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:56Z","lastTransitionTime":"2025-11-24T08:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:56 crc kubenswrapper[4718]: E1124 08:36:56.161815 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.166410 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.166487 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.166511 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.166542 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.166563 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:56Z","lastTransitionTime":"2025-11-24T08:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:56 crc kubenswrapper[4718]: E1124 08:36:56.180279 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.184686 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.184817 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.184882 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.184954 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.185048 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:56Z","lastTransitionTime":"2025-11-24T08:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:56 crc kubenswrapper[4718]: E1124 08:36:56.197845 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:56Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:56 crc kubenswrapper[4718]: E1124 08:36:56.198274 4718 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.200226 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.200371 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.200454 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.200524 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.200591 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:56Z","lastTransitionTime":"2025-11-24T08:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.302817 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.303069 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.303146 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.303251 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.303319 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:56Z","lastTransitionTime":"2025-11-24T08:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.405264 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.405318 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.405333 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.405351 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.405363 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:56Z","lastTransitionTime":"2025-11-24T08:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.507326 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.507364 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.507374 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.507390 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.507402 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:56Z","lastTransitionTime":"2025-11-24T08:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.596183 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.596286 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:56 crc kubenswrapper[4718]: E1124 08:36:56.596331 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.596183 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:56 crc kubenswrapper[4718]: E1124 08:36:56.596428 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:56 crc kubenswrapper[4718]: E1124 08:36:56.596454 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.609707 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.609944 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.610033 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.610134 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.610196 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:56Z","lastTransitionTime":"2025-11-24T08:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.712406 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.712443 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.712451 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.712465 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.712475 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:56Z","lastTransitionTime":"2025-11-24T08:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.814840 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.814908 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.814921 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.814937 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.814949 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:56Z","lastTransitionTime":"2025-11-24T08:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.917305 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.917609 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.917761 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.917850 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:56 crc kubenswrapper[4718]: I1124 08:36:56.917909 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:56Z","lastTransitionTime":"2025-11-24T08:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.019681 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.020009 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.020137 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.020277 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.020393 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:57Z","lastTransitionTime":"2025-11-24T08:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.123183 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.123226 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.123234 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.123248 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.123257 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:57Z","lastTransitionTime":"2025-11-24T08:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.225409 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.225474 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.225496 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.225511 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.225521 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:57Z","lastTransitionTime":"2025-11-24T08:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.327690 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.327735 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.327748 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.327764 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.327774 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:57Z","lastTransitionTime":"2025-11-24T08:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.429505 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.429542 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.429552 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.429568 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.429579 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:57Z","lastTransitionTime":"2025-11-24T08:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.532232 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.532283 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.532291 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.532304 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.532315 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:57Z","lastTransitionTime":"2025-11-24T08:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.595378 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:57 crc kubenswrapper[4718]: E1124 08:36:57.595889 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.634495 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.634546 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.634557 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.634570 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.634579 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:57Z","lastTransitionTime":"2025-11-24T08:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.736697 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.736802 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.736814 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.736828 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.736838 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:57Z","lastTransitionTime":"2025-11-24T08:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.838741 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.838775 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.838803 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.838815 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.838824 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:57Z","lastTransitionTime":"2025-11-24T08:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.944601 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.944667 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.944677 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.944693 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:57 crc kubenswrapper[4718]: I1124 08:36:57.944703 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:57Z","lastTransitionTime":"2025-11-24T08:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.046540 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.046579 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.046588 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.046603 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.046613 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:58Z","lastTransitionTime":"2025-11-24T08:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.148340 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.148380 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.148390 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.148407 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.148417 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:58Z","lastTransitionTime":"2025-11-24T08:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.251212 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.251433 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.251560 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.251643 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.251720 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:58Z","lastTransitionTime":"2025-11-24T08:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.354726 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.354772 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.354781 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.354796 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.354807 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:58Z","lastTransitionTime":"2025-11-24T08:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.457471 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.457505 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.457516 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.457532 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.457543 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:58Z","lastTransitionTime":"2025-11-24T08:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.559867 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.559899 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.559908 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.559920 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.559929 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:58Z","lastTransitionTime":"2025-11-24T08:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.595465 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.595487 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.595634 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:36:58 crc kubenswrapper[4718]: E1124 08:36:58.595705 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:36:58 crc kubenswrapper[4718]: E1124 08:36:58.595997 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:36:58 crc kubenswrapper[4718]: E1124 08:36:58.596044 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.609673 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"read
y\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.623591 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.634683 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.651575 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14047c6f6c5b355ed402b7d3c9bf7b3aea877205
a1bd1f8bed40383ae152b622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:44Z\\\",\\\"message\\\":\\\"org/owner:openshift-service-ca-operator/metrics]} name:Service_openshift-service-ca-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.40:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {2a3fb1a3-a476-4e14-bcf5-fb79af60206a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1124 08:36:44.413961 6772 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z]\\\\nI1124 08:36:44.4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:36:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.661856 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.661884 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.661891 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.661903 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.661911 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:58Z","lastTransitionTime":"2025-11-24T08:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.662796 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.673443 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 
08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.683174 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"501ab93e-fd17-48be-a56f-595940aa0fd2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59a58ada5e1c2a88df71a98a5d52c0b32aa00c68b701b5cd1711d39894553754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545bc14d41432ab68f798e54e78640e19d1fc6186a1710e171eb309b504f3e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://545bc14d41432ab68f798e54e78640e19d1fc6186a1710e171eb309b504f3e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.703026 4718 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.713829 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.727814 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.741515 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.752228 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.763621 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.765449 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.765494 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.765503 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.765518 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.765531 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:58Z","lastTransitionTime":"2025-11-24T08:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.774749 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308a885775330d5e1550c9c740909b7bb1bbff451fa1d5b196af6263de715424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:38Z\\\",\\\"message\\\":\\\"2025-11-24T08:35:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_155f4047-4258-4c3d-b5b5-07f36579dc25\\\\n2025-11-24T08:35:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_155f4047-4258-4c3d-b5b5-07f36579dc25 to /host/opt/cni/bin/\\\\n2025-11-24T08:35:53Z [verbose] multus-daemon started\\\\n2025-11-24T08:35:53Z [verbose] Readiness Indicator file check\\\\n2025-11-24T08:36:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.785839 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba10360-c090-4095-8af1-5956450d250d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2447b7b95e023d5250d7f7f92372712537ac3698312da71d6c6c70b23bccf1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb96b198cd151b3cd4336ecbb27650bb3104c4c68d815ec5b911ee0aca16da2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946f83395434468ac3f7b67789108982e4341984166b966312fef9bcb1a9d48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.797626 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 
2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.810012 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\
\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.820013 4718 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.836671 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c6
52a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:58Z is after 2025-08-24T17:21:41Z" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.868443 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.868703 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.868805 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.868872 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.868938 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:58Z","lastTransitionTime":"2025-11-24T08:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.971129 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.971400 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.971465 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.971526 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:58 crc kubenswrapper[4718]: I1124 08:36:58.971606 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:58Z","lastTransitionTime":"2025-11-24T08:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.073682 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.073947 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.074040 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.074106 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.074165 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:59Z","lastTransitionTime":"2025-11-24T08:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.176597 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.176630 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.176639 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.176652 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.176660 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:59Z","lastTransitionTime":"2025-11-24T08:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.278716 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.278760 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.278774 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.278793 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.278806 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:59Z","lastTransitionTime":"2025-11-24T08:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.381860 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.381904 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.381934 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.381955 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.381980 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:59Z","lastTransitionTime":"2025-11-24T08:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.484804 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.484851 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.484866 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.484888 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.484904 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:59Z","lastTransitionTime":"2025-11-24T08:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.586952 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.587251 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.587355 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.587426 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.587484 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:59Z","lastTransitionTime":"2025-11-24T08:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.595394 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:36:59 crc kubenswrapper[4718]: E1124 08:36:59.595528 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.689785 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.689820 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.689831 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.689848 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.689857 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:59Z","lastTransitionTime":"2025-11-24T08:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.792603 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.792632 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.792640 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.792653 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.792662 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:59Z","lastTransitionTime":"2025-11-24T08:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.895461 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.895509 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.895525 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.895540 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.895550 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:59Z","lastTransitionTime":"2025-11-24T08:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.998188 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.998228 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.998238 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.998252 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:36:59 crc kubenswrapper[4718]: I1124 08:36:59.998259 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:36:59Z","lastTransitionTime":"2025-11-24T08:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.103621 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.103666 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.103697 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.103717 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.103731 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:00Z","lastTransitionTime":"2025-11-24T08:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.206013 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.206040 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.206049 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.206062 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.206070 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:00Z","lastTransitionTime":"2025-11-24T08:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.309232 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.309272 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.309283 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.309297 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.309307 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:00Z","lastTransitionTime":"2025-11-24T08:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.411566 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.411614 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.411631 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.411647 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.411659 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:00Z","lastTransitionTime":"2025-11-24T08:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.513941 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.514012 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.514024 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.514041 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.514052 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:00Z","lastTransitionTime":"2025-11-24T08:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.595627 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.595627 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.595666 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:00 crc kubenswrapper[4718]: E1124 08:37:00.596118 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:00 crc kubenswrapper[4718]: E1124 08:37:00.596146 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:00 crc kubenswrapper[4718]: E1124 08:37:00.596216 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.596440 4718 scope.go:117] "RemoveContainer" containerID="14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622" Nov 24 08:37:00 crc kubenswrapper[4718]: E1124 08:37:00.596597 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.617010 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.617068 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.617080 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.617097 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.617109 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:00Z","lastTransitionTime":"2025-11-24T08:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.719930 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.720015 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.720027 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.720045 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.720056 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:00Z","lastTransitionTime":"2025-11-24T08:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.822213 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.822245 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.822252 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.822265 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.822274 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:00Z","lastTransitionTime":"2025-11-24T08:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.924503 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.924560 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.924571 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.924588 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:00 crc kubenswrapper[4718]: I1124 08:37:00.924601 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:00Z","lastTransitionTime":"2025-11-24T08:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.027525 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.027581 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.027607 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.027639 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.027654 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:01Z","lastTransitionTime":"2025-11-24T08:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.130094 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.130133 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.130142 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.130156 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.130164 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:01Z","lastTransitionTime":"2025-11-24T08:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.232438 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.232759 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.232858 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.232990 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.233088 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:01Z","lastTransitionTime":"2025-11-24T08:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.335752 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.335793 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.335803 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.335851 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.335863 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:01Z","lastTransitionTime":"2025-11-24T08:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.437735 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.437775 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.437788 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.437802 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.437810 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:01Z","lastTransitionTime":"2025-11-24T08:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.539729 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.539825 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.539838 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.539857 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.539867 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:01Z","lastTransitionTime":"2025-11-24T08:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.596212 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:01 crc kubenswrapper[4718]: E1124 08:37:01.596532 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.643523 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.643722 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.643740 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.643763 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.643825 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:01Z","lastTransitionTime":"2025-11-24T08:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.745858 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.745888 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.745895 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.745907 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.745917 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:01Z","lastTransitionTime":"2025-11-24T08:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.848820 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.848871 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.848891 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.848914 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.848930 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:01Z","lastTransitionTime":"2025-11-24T08:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.951019 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.951075 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.951085 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.951097 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:01 crc kubenswrapper[4718]: I1124 08:37:01.951105 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:01Z","lastTransitionTime":"2025-11-24T08:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.053726 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.053931 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.054057 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.054137 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.054199 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:02Z","lastTransitionTime":"2025-11-24T08:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.156502 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.156549 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.156560 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.156576 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.156587 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:02Z","lastTransitionTime":"2025-11-24T08:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.258936 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.259001 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.259011 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.259025 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.259035 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:02Z","lastTransitionTime":"2025-11-24T08:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.361415 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.361452 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.361464 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.361480 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.361492 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:02Z","lastTransitionTime":"2025-11-24T08:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.464252 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.464298 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.464308 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.464323 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.464333 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:02Z","lastTransitionTime":"2025-11-24T08:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.566691 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.566743 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.566756 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.566775 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.566787 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:02Z","lastTransitionTime":"2025-11-24T08:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.596343 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.596441 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:02 crc kubenswrapper[4718]: E1124 08:37:02.596507 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.596530 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:02 crc kubenswrapper[4718]: E1124 08:37:02.596657 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:02 crc kubenswrapper[4718]: E1124 08:37:02.596732 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.669592 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.669648 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.669663 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.669684 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.669699 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:02Z","lastTransitionTime":"2025-11-24T08:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.771782 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.771864 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.771876 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.771897 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.771910 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:02Z","lastTransitionTime":"2025-11-24T08:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.873544 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.873584 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.873594 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.873607 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.873616 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:02Z","lastTransitionTime":"2025-11-24T08:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.975951 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.976016 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.976028 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.976046 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:02 crc kubenswrapper[4718]: I1124 08:37:02.976062 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:02Z","lastTransitionTime":"2025-11-24T08:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.079391 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.079442 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.079453 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.079468 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.079480 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:03Z","lastTransitionTime":"2025-11-24T08:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.182763 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.182802 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.182810 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.182824 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.182833 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:03Z","lastTransitionTime":"2025-11-24T08:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.286324 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.286387 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.286398 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.286416 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.286431 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:03Z","lastTransitionTime":"2025-11-24T08:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.389781 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.389819 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.389830 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.389850 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.389869 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:03Z","lastTransitionTime":"2025-11-24T08:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.492534 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.492568 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.492578 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.492590 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.492598 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:03Z","lastTransitionTime":"2025-11-24T08:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.595340 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:03 crc kubenswrapper[4718]: E1124 08:37:03.595500 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.595672 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.595720 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.595732 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.595752 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.595765 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:03Z","lastTransitionTime":"2025-11-24T08:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.698736 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.698774 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.698783 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.698853 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.698865 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:03Z","lastTransitionTime":"2025-11-24T08:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.801303 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.801348 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.801359 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.801374 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.801386 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:03Z","lastTransitionTime":"2025-11-24T08:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.903959 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.904341 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.904462 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.904570 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:03 crc kubenswrapper[4718]: I1124 08:37:03.904655 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:03Z","lastTransitionTime":"2025-11-24T08:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.007729 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.007779 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.007794 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.007811 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.007823 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:04Z","lastTransitionTime":"2025-11-24T08:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.110077 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.110111 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.110122 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.110137 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.110147 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:04Z","lastTransitionTime":"2025-11-24T08:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.212589 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.212630 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.212638 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.212652 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.212661 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:04Z","lastTransitionTime":"2025-11-24T08:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.314700 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.314736 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.314747 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.314761 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.314771 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:04Z","lastTransitionTime":"2025-11-24T08:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.416548 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.416588 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.416598 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.416611 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.416620 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:04Z","lastTransitionTime":"2025-11-24T08:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.518725 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.518768 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.518779 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.518794 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.518804 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:04Z","lastTransitionTime":"2025-11-24T08:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.596321 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:04 crc kubenswrapper[4718]: E1124 08:37:04.596462 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.596684 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:04 crc kubenswrapper[4718]: E1124 08:37:04.596753 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.596889 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:04 crc kubenswrapper[4718]: E1124 08:37:04.596943 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.620730 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.620776 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.620789 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.620806 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.620818 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:04Z","lastTransitionTime":"2025-11-24T08:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.723640 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.723703 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.723719 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.723737 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.723749 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:04Z","lastTransitionTime":"2025-11-24T08:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.826273 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.826320 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.826338 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.826355 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.826367 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:04Z","lastTransitionTime":"2025-11-24T08:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.928861 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.928900 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.928909 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.928924 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:04 crc kubenswrapper[4718]: I1124 08:37:04.928934 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:04Z","lastTransitionTime":"2025-11-24T08:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.031130 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.031171 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.031182 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.031194 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.031204 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:05Z","lastTransitionTime":"2025-11-24T08:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.133144 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.133181 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.133193 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.133211 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.133222 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:05Z","lastTransitionTime":"2025-11-24T08:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.234846 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.234902 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.234912 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.234926 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.234935 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:05Z","lastTransitionTime":"2025-11-24T08:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.337125 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.337159 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.337168 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.337180 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.337188 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:05Z","lastTransitionTime":"2025-11-24T08:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.439449 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.439542 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.439554 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.439571 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.439583 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:05Z","lastTransitionTime":"2025-11-24T08:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.542388 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.542457 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.542469 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.542490 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.542507 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:05Z","lastTransitionTime":"2025-11-24T08:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.596106 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:05 crc kubenswrapper[4718]: E1124 08:37:05.596486 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.645253 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.645291 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.645299 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.645313 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.645323 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:05Z","lastTransitionTime":"2025-11-24T08:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.748169 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.748220 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.748230 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.748243 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.748254 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:05Z","lastTransitionTime":"2025-11-24T08:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.850645 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.850680 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.850690 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.850703 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.850712 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:05Z","lastTransitionTime":"2025-11-24T08:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.952681 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.952717 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.952726 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.952740 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:05 crc kubenswrapper[4718]: I1124 08:37:05.952750 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:05Z","lastTransitionTime":"2025-11-24T08:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.055780 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.055818 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.055827 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.055842 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.055853 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:06Z","lastTransitionTime":"2025-11-24T08:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.158488 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.158548 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.158560 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.158582 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.158599 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:06Z","lastTransitionTime":"2025-11-24T08:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.210179 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs\") pod \"network-metrics-daemon-ctdmz\" (UID: \"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\") " pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:06 crc kubenswrapper[4718]: E1124 08:37:06.210346 4718 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 08:37:06 crc kubenswrapper[4718]: E1124 08:37:06.210392 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs podName:3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97 nodeName:}" failed. No retries permitted until 2025-11-24 08:38:10.210378513 +0000 UTC m=+162.326669417 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs") pod "network-metrics-daemon-ctdmz" (UID: "3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.261448 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.261735 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.261832 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.261925 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.262053 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:06Z","lastTransitionTime":"2025-11-24T08:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.366255 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.366597 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.366685 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.366777 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.366851 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:06Z","lastTransitionTime":"2025-11-24T08:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.469432 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.469741 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.469829 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.469928 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.470031 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:06Z","lastTransitionTime":"2025-11-24T08:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.574308 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.574640 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.574734 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.574819 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.574915 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:06Z","lastTransitionTime":"2025-11-24T08:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.591470 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.591536 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.591558 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.591583 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.591601 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:06Z","lastTransitionTime":"2025-11-24T08:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.595532 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.595663 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.595532 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:06 crc kubenswrapper[4718]: E1124 08:37:06.595705 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:06 crc kubenswrapper[4718]: E1124 08:37:06.595823 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:06 crc kubenswrapper[4718]: E1124 08:37:06.595924 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:06 crc kubenswrapper[4718]: E1124 08:37:06.613669 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:06Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.620107 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.620397 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.620513 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.620627 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.620732 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:06Z","lastTransitionTime":"2025-11-24T08:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:06 crc kubenswrapper[4718]: E1124 08:37:06.634368 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:06Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.639934 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.640045 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.640070 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.640097 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.640115 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:06Z","lastTransitionTime":"2025-11-24T08:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:06 crc kubenswrapper[4718]: E1124 08:37:06.653083 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:06Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.658947 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.659225 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.659288 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.659373 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.659488 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:06Z","lastTransitionTime":"2025-11-24T08:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:06 crc kubenswrapper[4718]: E1124 08:37:06.674740 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:06Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.685019 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.685088 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.685104 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.685131 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.685152 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:06Z","lastTransitionTime":"2025-11-24T08:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:06 crc kubenswrapper[4718]: E1124 08:37:06.703619 4718 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T08:37:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b19cdd33-5845-4248-9fcf-160eb228d941\\\",\\\"systemUUID\\\":\\\"8f96cf3c-132c-4425-a289-ea01a722de47\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:06Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:06 crc kubenswrapper[4718]: E1124 08:37:06.704018 4718 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.705748 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.705813 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.705826 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.705853 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.705869 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:06Z","lastTransitionTime":"2025-11-24T08:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.808686 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.808727 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.808738 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.808753 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.808762 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:06Z","lastTransitionTime":"2025-11-24T08:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.911182 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.911607 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.911721 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.911817 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:06 crc kubenswrapper[4718]: I1124 08:37:06.911905 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:06Z","lastTransitionTime":"2025-11-24T08:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.014485 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.014566 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.014584 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.014610 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.014628 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:07Z","lastTransitionTime":"2025-11-24T08:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.117829 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.117888 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.117903 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.117926 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.117940 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:07Z","lastTransitionTime":"2025-11-24T08:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.221019 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.221064 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.221074 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.221091 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.221105 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:07Z","lastTransitionTime":"2025-11-24T08:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.324216 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.324470 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.324538 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.324650 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.324728 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:07Z","lastTransitionTime":"2025-11-24T08:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.428100 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.428148 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.428157 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.428176 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.428191 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:07Z","lastTransitionTime":"2025-11-24T08:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.531773 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.532285 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.532415 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.532529 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.532611 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:07Z","lastTransitionTime":"2025-11-24T08:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.595748 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:07 crc kubenswrapper[4718]: E1124 08:37:07.596192 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.635802 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.635848 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.635856 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.635873 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.635884 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:07Z","lastTransitionTime":"2025-11-24T08:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.738785 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.738856 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.738871 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.738897 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.738915 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:07Z","lastTransitionTime":"2025-11-24T08:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.842128 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.842468 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.842602 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.842727 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.842851 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:07Z","lastTransitionTime":"2025-11-24T08:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.945998 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.946040 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.946050 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.946068 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:07 crc kubenswrapper[4718]: I1124 08:37:07.946079 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:07Z","lastTransitionTime":"2025-11-24T08:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.048843 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.048887 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.048901 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.048919 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.048929 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:08Z","lastTransitionTime":"2025-11-24T08:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.151693 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.151766 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.151782 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.151805 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.151820 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:08Z","lastTransitionTime":"2025-11-24T08:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.254360 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.254435 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.254456 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.254488 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.254504 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:08Z","lastTransitionTime":"2025-11-24T08:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.356355 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.356387 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.356396 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.356408 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.356417 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:08Z","lastTransitionTime":"2025-11-24T08:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.458396 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.458433 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.458442 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.458457 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.458466 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:08Z","lastTransitionTime":"2025-11-24T08:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.560226 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.560265 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.560275 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.560287 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.560297 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:08Z","lastTransitionTime":"2025-11-24T08:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.596475 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.596702 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.596724 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:08 crc kubenswrapper[4718]: E1124 08:37:08.596807 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:08 crc kubenswrapper[4718]: E1124 08:37:08.597014 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:08 crc kubenswrapper[4718]: E1124 08:37:08.597165 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.618027 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81591bdb-5ea4-4469-9387-2dc9d9130475\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1fe1235266d4d90edcd1a8d20ac9b26efb2e907d9268d38c253bbe1c04a8fd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68cfe075ee9d4b044bb919ee9dd91eb83fc468041c803c19f407fcd5a729dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22a37432ab19038a24dff84c3c990acac0b1880a4c3f9220f07a3a8dda3c9f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d53715169ddffae5ecef53ef04f491fb66b0c652a6043cdc97e58eadad16b9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9767796ae8b6473b655c4e7d2125475bd9116f02f70d0518669c3465763dfb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"term
inated\\\":{\\\"containerID\\\":\\\"cri-o://b1929e603b22268fb0811cd528f241c52d76b606afc53c91d2eb2e28261c1028\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a1548d541a25877851a8c75f546c1e0226dccfd820d54bc69a4471ea7c090db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d8e81a4cdf4a29afb6927f2150577d7b4bae445d7401b52738916969b1638ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.633719 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://424cda1953aae4a474a034741a686c9f11484e7e7fd5690737c7cf353d0bfdd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.647679 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f25c7267-0621-49ab-91e3-08d7d85c815d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://465da6bb61bb110d145e709db0cdde499cd29bac9777f79c5baf6e550009e72f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae03762e98927828f30280d3e58716f4542d3a302fb4d3afe90c90130427aabd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88870fc4f4bc5b9e16624da3ad7077073f148e2c2da1f8fc33fd62a8efaec54a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://45c1a84f491afafe49f326e2353fdc625e980f92f2b509671ba73cac8085ce9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fecdeb808142c8106a7824ae54f3f700835505b4b7520547e5724e3aeaaf656\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2888a32f4cae85ddbdf65f2ee329d83260cdf8a55e3b9a9b1d9d5b3605089367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e45573865b725ac2e891e9f157ff9b1ddcde9b41a65076b0b216ac229c40889\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pdxxl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qbwmc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.657036 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ctdmz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.665137 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.665180 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.665193 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.665210 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.665223 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:08Z","lastTransitionTime":"2025-11-24T08:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.673201 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2zxtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41d62746-e30a-4e15-a353-c2b4800bdae1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50ff3b438e947b37781280a07dbc31685e6ff5ecbde49baac3609d7a585da11f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xzpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2zxtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.683531 4718 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d59d24a4-3e05-4bad-a5ec-4e2689007e54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d661da550d32004c8028b594728fb27325f008d868dd2aa9b0c922afcddd1adb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://430920e159e96edbedbe7a54111c4a8c15641e629edb40be60e895a2b3b681a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qln9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:36:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l6tzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.696856 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"501ab93e-fd17-48be-a56f-595940aa0fd2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59a58ada5e1c2a88df71a98a5d52c0b32aa00c68b701b5cd1711d39894553754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545bc14d41432ab68f798e54e78640e19d1fc6186a1710e171eb309b504f3e77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://545bc14d41432ab68f798e54e78640e19d1fc6186a1710e171eb309b504f3e77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 
crc kubenswrapper[4718]: I1124 08:37:08.717504 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"374e926f-1eec-4cf5-9b51-9ab1f9242fa0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c77eeef64d340ac6d5671eb9001666add23c7d9beaf80422e541bce84b62d6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f065428cb7a35ed5955a3abb5e434fc53b6ada7d7f5b934ce46cd77ae529ac59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67c2caa6d6e68103c602e2a82e62ff26a7d58e8da636c88d23aa082f83d7eea3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-d
ir\\\"}]},{\\\"containerID\\\":\\\"cri-o://560449d4b01c54042cfcd6d206713f8d3c30ea303534c5c6727f73322b3cf87b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5af713ab06957685e13bc65f3b8dac710b50f7139322d416fe03a9a83a83f77b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"file observer\\\\nW1124 08:35:47.617425 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1124 08:35:47.617579 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 08:35:47.618816 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-965343327/tls.crt::/tmp/serving-cert-965343327/tls.key\\\\\\\"\\\\nI1124 08:35:47.991733 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1124 08:35:47.994886 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1124 08:35:47.994943 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1124 08:35:47.995012 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1124 08:35:47.995044 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1124 08:35:47.998445 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1124 08:35:47.998470 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998474 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1124 08:35:47.998479 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1124 08:35:47.998482 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1124 08:35:47.998485 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 08:35:47.998488 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 08:35:47.998523 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1124 08:35:48.000219 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415419e08345661d7ebd84b4c5442210edf306a6c2fdebb06068616f478c46da\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89b5e5ce20f6fc64b39203ce6a444852d1f8f329df09a03bc80924f59e432163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.731793 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5b0512-bd06-4f85-89cd-f2b43f2eb878\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b4b024eea5f2b9b7646d1fe81ab787c834b36ef6d566c1c27863409767e274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56c255c7ec20a56e6fd58dfd1a88b79c7f1ea3f63adeee181ef1f9e5e362328c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://206084a4082b9d8d91145f45b8868f8ae46acb0b3a8bd680e8ba0de060bd7913\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2271b83f5d6c0911b50811e1c442fd7c08518789231b91c73d47133f444da23a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.744214 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89887d07-87db-4f4f-a6fa-3cd34e814131\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b964c7bd046c0ef6bfbe9c884494e047f7a099930fb5fd56602458a721848c5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c837f919f14f1cd051
1799645f58364683654da157a2e5789695bf2593b32f67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rhqzp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-575gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.762552 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14047c6f6c5b355ed402b7d3c9bf7b3aea877205
a1bd1f8bed40383ae152b622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:44Z\\\",\\\"message\\\":\\\"org/owner:openshift-service-ca-operator/metrics]} name:Service_openshift-service-ca-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.40:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {2a3fb1a3-a476-4e14-bcf5-fb79af60206a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1124 08:36:44.413961 6772 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:36:44Z is after 2025-08-24T17:21:41Z]\\\\nI1124 08:36:44.4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:36:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tq976\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lk4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.767938 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.768020 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.768038 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.768061 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.768079 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:08Z","lastTransitionTime":"2025-11-24T08:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.777897 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.793556 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.809671 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.824456 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba10360-c090-4095-8af1-5956450d250d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2447b7b95e023d5250d7f7f92372712537ac3698312da71d6c6c70b23bccf1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb96b198cd151b3cd4336ecbb27650bb3104c4c68d815ec5b911ee0aca16da2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35
:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946f83395434468ac3f7b67789108982e4341984166b966312fef9bcb1a9d48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://817eec49734167604b6be2dcf0693293be42268e428e2e2ac5dfa7ea2d622a0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T08:35:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.841221 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2874c708870634637ad21c443913f123c3c27d00989ba60a33c330a6c9dbd178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e06e453be3b826d41c6e369160222e0b32fac579b2d21b9aab78c0d58cce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.859678 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://433035f1ca3eea3482bc345af50193fa592fa6c8540e8220bfb076bcdafa65a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.870722 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.870765 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.870775 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.870793 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.870805 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:08Z","lastTransitionTime":"2025-11-24T08:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.875109 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8cfq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae67d599-f070-4cc9-a934-d546e1d84e7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb26dd32777e8dc265ca872531b87fa804ddd55c7e4e1f39ff824e9d4db0eeac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:35:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wzs5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8cfq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.891948 4718 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zvlvh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"811ba3ee-aad5-427c-84f7-fbd3b78255ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:35:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T08:36:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://308a885775330d5e1550c9c740909b7bb1bbff451fa1d5b196af6263de715424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T08:36:38Z\\\",\\\"message\\\":\\\"2025-11-24T08:35:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_155f4047-4258-4c3d-b5b5-07f36579dc25\\\\n2025-11-24T08:35:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_155f4047-4258-4c3d-b5b5-07f36579dc25 to /host/opt/cni/bin/\\\\n2025-11-24T08:35:53Z [verbose] multus-daemon started\\\\n2025-11-24T08:35:53Z [verbose] Readiness Indicator file check\\\\n2025-11-24T08:36:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T08:35:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T08:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qf62t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T08:35:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zvlvh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T08:37:08Z is after 2025-08-24T17:21:41Z" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.974336 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.974417 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.974440 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.974473 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:08 crc kubenswrapper[4718]: I1124 08:37:08.974496 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:08Z","lastTransitionTime":"2025-11-24T08:37:08Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.077432 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.077484 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.077500 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.077520 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.077535 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:09Z","lastTransitionTime":"2025-11-24T08:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.179481 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.179553 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.179566 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.179585 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.179604 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:09Z","lastTransitionTime":"2025-11-24T08:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.281899 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.281948 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.281960 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.281994 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.282008 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:09Z","lastTransitionTime":"2025-11-24T08:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.384116 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.384155 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.384165 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.384182 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.384193 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:09Z","lastTransitionTime":"2025-11-24T08:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.486525 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.486561 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.486570 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.486585 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.486594 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:09Z","lastTransitionTime":"2025-11-24T08:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.589258 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.589309 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.589321 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.589338 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.589351 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:09Z","lastTransitionTime":"2025-11-24T08:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.595602 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:09 crc kubenswrapper[4718]: E1124 08:37:09.595715 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.733283 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.733322 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.733333 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.733348 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.733359 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:09Z","lastTransitionTime":"2025-11-24T08:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.835056 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.835096 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.835107 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.835122 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.835134 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:09Z","lastTransitionTime":"2025-11-24T08:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.937736 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.937781 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.937789 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.937804 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:09 crc kubenswrapper[4718]: I1124 08:37:09.937815 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:09Z","lastTransitionTime":"2025-11-24T08:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.039843 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.039882 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.039893 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.039911 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.039924 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:10Z","lastTransitionTime":"2025-11-24T08:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.141618 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.141655 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.141664 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.141678 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.141688 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:10Z","lastTransitionTime":"2025-11-24T08:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.243797 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.243834 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.243845 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.243858 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.243867 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:10Z","lastTransitionTime":"2025-11-24T08:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.345584 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.345622 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.345635 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.345652 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.345662 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:10Z","lastTransitionTime":"2025-11-24T08:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.448058 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.448099 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.448117 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.448133 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.448145 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:10Z","lastTransitionTime":"2025-11-24T08:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.550615 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.550685 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.550700 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.550731 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.550749 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:10Z","lastTransitionTime":"2025-11-24T08:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.596439 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.596485 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.596455 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:10 crc kubenswrapper[4718]: E1124 08:37:10.596613 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:10 crc kubenswrapper[4718]: E1124 08:37:10.596722 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:10 crc kubenswrapper[4718]: E1124 08:37:10.596850 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.653828 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.653910 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.653935 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.653992 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.654016 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:10Z","lastTransitionTime":"2025-11-24T08:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.757448 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.757496 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.757512 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.757530 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.757543 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:10Z","lastTransitionTime":"2025-11-24T08:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.860765 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.861102 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.861234 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.861361 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.861479 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:10Z","lastTransitionTime":"2025-11-24T08:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.963917 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.964011 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.964034 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.964061 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:10 crc kubenswrapper[4718]: I1124 08:37:10.964083 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:10Z","lastTransitionTime":"2025-11-24T08:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.066287 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.066318 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.066326 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.066338 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.066348 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:11Z","lastTransitionTime":"2025-11-24T08:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.169265 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.169324 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.169344 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.169372 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.169393 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:11Z","lastTransitionTime":"2025-11-24T08:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.272224 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.272280 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.272299 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.272326 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.272346 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:11Z","lastTransitionTime":"2025-11-24T08:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.375933 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.376065 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.376096 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.376133 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.376158 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:11Z","lastTransitionTime":"2025-11-24T08:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.479022 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.479120 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.479154 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.479184 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.479203 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:11Z","lastTransitionTime":"2025-11-24T08:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.583336 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.583440 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.583464 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.583498 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.583516 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:11Z","lastTransitionTime":"2025-11-24T08:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.595842 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:11 crc kubenswrapper[4718]: E1124 08:37:11.596355 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.597093 4718 scope.go:117] "RemoveContainer" containerID="14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622" Nov 24 08:37:11 crc kubenswrapper[4718]: E1124 08:37:11.597253 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2lk4b_openshift-ovn-kubernetes(692d15f5-2875-47c6-92e3-3c99bfd6b7ea)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.686822 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.686888 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.686901 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.686926 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.686941 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:11Z","lastTransitionTime":"2025-11-24T08:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.789947 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.790011 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.790023 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.790037 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.790046 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:11Z","lastTransitionTime":"2025-11-24T08:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.893991 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.894032 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.894043 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.894056 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.894064 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:11Z","lastTransitionTime":"2025-11-24T08:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.996902 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.996947 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.996959 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.996991 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:11 crc kubenswrapper[4718]: I1124 08:37:11.997004 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:11Z","lastTransitionTime":"2025-11-24T08:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.099903 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.099942 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.099953 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.099999 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.100014 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:12Z","lastTransitionTime":"2025-11-24T08:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.202253 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.202498 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.202570 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.202644 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.202714 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:12Z","lastTransitionTime":"2025-11-24T08:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.304999 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.305045 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.305057 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.305072 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.305081 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:12Z","lastTransitionTime":"2025-11-24T08:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.408548 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.408579 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.408588 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.408602 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.408611 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:12Z","lastTransitionTime":"2025-11-24T08:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.511778 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.511843 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.511860 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.511929 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.511950 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:12Z","lastTransitionTime":"2025-11-24T08:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.595695 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.595814 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.595909 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:12 crc kubenswrapper[4718]: E1124 08:37:12.595923 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:12 crc kubenswrapper[4718]: E1124 08:37:12.596091 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:12 crc kubenswrapper[4718]: E1124 08:37:12.596174 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.613960 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.614020 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.614031 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.614045 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.614054 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:12Z","lastTransitionTime":"2025-11-24T08:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.716302 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.716333 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.716342 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.716374 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.716383 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:12Z","lastTransitionTime":"2025-11-24T08:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.820427 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.820481 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.820493 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.820513 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.820529 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:12Z","lastTransitionTime":"2025-11-24T08:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.923031 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.923059 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.923068 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.923082 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:12 crc kubenswrapper[4718]: I1124 08:37:12.923091 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:12Z","lastTransitionTime":"2025-11-24T08:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.027428 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.027502 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.027525 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.027552 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.027569 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:13Z","lastTransitionTime":"2025-11-24T08:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.130773 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.130819 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.130832 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.130848 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.130859 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:13Z","lastTransitionTime":"2025-11-24T08:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.233918 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.233986 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.234001 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.234016 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.234027 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:13Z","lastTransitionTime":"2025-11-24T08:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.337471 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.337509 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.337517 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.337531 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.337542 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:13Z","lastTransitionTime":"2025-11-24T08:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.440008 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.440092 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.440124 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.440153 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.440180 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:13Z","lastTransitionTime":"2025-11-24T08:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.542616 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.542658 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.542669 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.542686 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.542698 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:13Z","lastTransitionTime":"2025-11-24T08:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.596423 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:13 crc kubenswrapper[4718]: E1124 08:37:13.596700 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.645405 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.645472 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.645489 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.645513 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.645528 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:13Z","lastTransitionTime":"2025-11-24T08:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.748869 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.748913 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.748925 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.748940 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.748951 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:13Z","lastTransitionTime":"2025-11-24T08:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.850915 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.850958 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.850982 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.850999 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.851010 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:13Z","lastTransitionTime":"2025-11-24T08:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.953240 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.953307 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.953326 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.953350 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:13 crc kubenswrapper[4718]: I1124 08:37:13.953365 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:13Z","lastTransitionTime":"2025-11-24T08:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.056595 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.056638 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.056647 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.056662 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.056675 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:14Z","lastTransitionTime":"2025-11-24T08:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.158794 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.158829 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.158839 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.158852 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.158861 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:14Z","lastTransitionTime":"2025-11-24T08:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.261233 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.261274 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.261285 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.261307 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.261317 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:14Z","lastTransitionTime":"2025-11-24T08:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.363788 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.363820 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.363828 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.363841 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.363850 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:14Z","lastTransitionTime":"2025-11-24T08:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.469047 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.469079 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.469087 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.469099 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.469107 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:14Z","lastTransitionTime":"2025-11-24T08:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.572411 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.572453 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.572464 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.572480 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.572491 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:14Z","lastTransitionTime":"2025-11-24T08:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.595484 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:14 crc kubenswrapper[4718]: E1124 08:37:14.595575 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.595489 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.595655 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:14 crc kubenswrapper[4718]: E1124 08:37:14.595803 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:14 crc kubenswrapper[4718]: E1124 08:37:14.595954 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.675195 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.675264 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.675288 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.675315 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.675338 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:14Z","lastTransitionTime":"2025-11-24T08:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.778114 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.778186 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.778209 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.778237 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.778261 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:14Z","lastTransitionTime":"2025-11-24T08:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.880315 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.880357 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.880367 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.880381 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.880391 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:14Z","lastTransitionTime":"2025-11-24T08:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.982703 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.982741 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.982752 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.982767 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:14 crc kubenswrapper[4718]: I1124 08:37:14.982777 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:14Z","lastTransitionTime":"2025-11-24T08:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.085346 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.085396 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.085406 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.085423 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.085434 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:15Z","lastTransitionTime":"2025-11-24T08:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.188521 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.188563 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.188573 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.188587 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.188596 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:15Z","lastTransitionTime":"2025-11-24T08:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.290614 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.290659 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.290670 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.290686 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.290697 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:15Z","lastTransitionTime":"2025-11-24T08:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.392680 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.392721 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.392730 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.392745 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.392756 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:15Z","lastTransitionTime":"2025-11-24T08:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.495281 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.495337 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.495353 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.495374 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.495392 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:15Z","lastTransitionTime":"2025-11-24T08:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.595650 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:15 crc kubenswrapper[4718]: E1124 08:37:15.595901 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.597645 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.597680 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.597692 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.597706 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.597716 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:15Z","lastTransitionTime":"2025-11-24T08:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.700693 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.700753 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.700765 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.700777 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.700785 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:15Z","lastTransitionTime":"2025-11-24T08:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.803374 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.803419 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.803431 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.803446 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.803456 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:15Z","lastTransitionTime":"2025-11-24T08:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.906748 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.906795 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.906807 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.906828 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:15 crc kubenswrapper[4718]: I1124 08:37:15.906840 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:15Z","lastTransitionTime":"2025-11-24T08:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.009802 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.009848 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.009856 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.009874 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.009883 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:16Z","lastTransitionTime":"2025-11-24T08:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.112740 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.112773 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.112791 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.112807 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.113212 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:16Z","lastTransitionTime":"2025-11-24T08:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.216122 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.216152 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.216161 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.216174 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.216183 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:16Z","lastTransitionTime":"2025-11-24T08:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.318929 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.319012 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.319025 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.319041 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.319074 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:16Z","lastTransitionTime":"2025-11-24T08:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.421729 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.421785 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.421804 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.421827 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.421844 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:16Z","lastTransitionTime":"2025-11-24T08:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.523746 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.523997 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.524007 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.524021 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.524030 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:16Z","lastTransitionTime":"2025-11-24T08:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.595705 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.595725 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.596000 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:16 crc kubenswrapper[4718]: E1124 08:37:16.596091 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:16 crc kubenswrapper[4718]: E1124 08:37:16.596154 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:16 crc kubenswrapper[4718]: E1124 08:37:16.596212 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.626435 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.626474 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.626483 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.626496 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.626505 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:16Z","lastTransitionTime":"2025-11-24T08:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.728458 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.728484 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.728492 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.728503 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.728513 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:16Z","lastTransitionTime":"2025-11-24T08:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.756058 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.756092 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.756102 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.756116 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.756128 4718 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T08:37:16Z","lastTransitionTime":"2025-11-24T08:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.797089 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc"] Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.798257 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.802480 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.802728 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.802747 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.803044 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.825565 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-8cfq9" podStartSLOduration=89.825542841 podStartE2EDuration="1m29.825542841s" podCreationTimestamp="2025-11-24 08:35:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:16.825429648 +0000 UTC m=+108.941720552" watchObservedRunningTime="2025-11-24 08:37:16.825542841 +0000 UTC m=+108.941833775" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.840471 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-zvlvh" podStartSLOduration=88.840451705 podStartE2EDuration="1m28.840451705s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:16.840365203 +0000 UTC m=+108.956656157" watchObservedRunningTime="2025-11-24 08:37:16.840451705 +0000 UTC m=+108.956742609" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.867962 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=59.867945311 podStartE2EDuration="59.867945311s" podCreationTimestamp="2025-11-24 08:36:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:16.85087924 +0000 UTC m=+108.967170144" watchObservedRunningTime="2025-11-24 08:37:16.867945311 +0000 UTC m=+108.984236215" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.881508 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-qbwmc" podStartSLOduration=88.881492719 podStartE2EDuration="1m28.881492719s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:16.881059047 +0000 UTC m=+108.997349981" watchObservedRunningTime="2025-11-24 08:37:16.881492719 +0000 UTC m=+108.997783623" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.930300 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=88.930275637 podStartE2EDuration="1m28.930275637s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-11-24 08:37:16.929783674 +0000 UTC m=+109.046074578" watchObservedRunningTime="2025-11-24 08:37:16.930275637 +0000 UTC m=+109.046566541" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.932878 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4670f36d-465f-4160-b879-a46b98836cc5-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4c9xc\" (UID: \"4670f36d-465f-4160-b879-a46b98836cc5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.932934 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/4670f36d-465f-4160-b879-a46b98836cc5-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4c9xc\" (UID: \"4670f36d-465f-4160-b879-a46b98836cc5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.932961 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4670f36d-465f-4160-b879-a46b98836cc5-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4c9xc\" (UID: \"4670f36d-465f-4160-b879-a46b98836cc5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.932993 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4670f36d-465f-4160-b879-a46b98836cc5-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4c9xc\" (UID: \"4670f36d-465f-4160-b879-a46b98836cc5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.933012 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/4670f36d-465f-4160-b879-a46b98836cc5-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4c9xc\" (UID: \"4670f36d-465f-4160-b879-a46b98836cc5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.970507 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=87.970479878 podStartE2EDuration="1m27.970479878s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:16.970226912 +0000 UTC m=+109.086517826" watchObservedRunningTime="2025-11-24 08:37:16.970479878 +0000 UTC m=+109.086770772" Nov 24 08:37:16 crc kubenswrapper[4718]: I1124 08:37:16.981563 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podStartSLOduration=88.981544361 podStartE2EDuration="1m28.981544361s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:16.981497309 +0000 UTC m=+109.097788213" 
watchObservedRunningTime="2025-11-24 08:37:16.981544361 +0000 UTC m=+109.097835265" Nov 24 08:37:17 crc kubenswrapper[4718]: I1124 08:37:17.023322 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-2zxtm" podStartSLOduration=89.023300563 podStartE2EDuration="1m29.023300563s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:17.013240748 +0000 UTC m=+109.129531652" watchObservedRunningTime="2025-11-24 08:37:17.023300563 +0000 UTC m=+109.139591467" Nov 24 08:37:17 crc kubenswrapper[4718]: I1124 08:37:17.023433 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l6tzj" podStartSLOduration=88.023428397 podStartE2EDuration="1m28.023428397s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:17.022654976 +0000 UTC m=+109.138945880" watchObservedRunningTime="2025-11-24 08:37:17.023428397 +0000 UTC m=+109.139719321" Nov 24 08:37:17 crc kubenswrapper[4718]: I1124 08:37:17.033643 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/4670f36d-465f-4160-b879-a46b98836cc5-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4c9xc\" (UID: \"4670f36d-465f-4160-b879-a46b98836cc5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" Nov 24 08:37:17 crc kubenswrapper[4718]: I1124 08:37:17.033727 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4670f36d-465f-4160-b879-a46b98836cc5-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4c9xc\" (UID: \"4670f36d-465f-4160-b879-a46b98836cc5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" Nov 24 08:37:17 crc kubenswrapper[4718]: I1124 08:37:17.033762 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4670f36d-465f-4160-b879-a46b98836cc5-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4c9xc\" (UID: \"4670f36d-465f-4160-b879-a46b98836cc5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" Nov 24 08:37:17 crc kubenswrapper[4718]: I1124 08:37:17.033788 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/4670f36d-465f-4160-b879-a46b98836cc5-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4c9xc\" (UID: \"4670f36d-465f-4160-b879-a46b98836cc5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" Nov 24 08:37:17 crc kubenswrapper[4718]: I1124 08:37:17.033856 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4670f36d-465f-4160-b879-a46b98836cc5-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4c9xc\" (UID: \"4670f36d-465f-4160-b879-a46b98836cc5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" Nov 24 08:37:17 crc kubenswrapper[4718]: I1124 08:37:17.034217 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/4670f36d-465f-4160-b879-a46b98836cc5-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4c9xc\" (UID: \"4670f36d-465f-4160-b879-a46b98836cc5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" Nov 24 08:37:17 crc kubenswrapper[4718]: I1124 08:37:17.034675 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/4670f36d-465f-4160-b879-a46b98836cc5-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4c9xc\" (UID: \"4670f36d-465f-4160-b879-a46b98836cc5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" Nov 24 08:37:17 crc kubenswrapper[4718]: I1124 08:37:17.035387 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4670f36d-465f-4160-b879-a46b98836cc5-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4c9xc\" (UID: \"4670f36d-465f-4160-b879-a46b98836cc5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" Nov 24 08:37:17 crc kubenswrapper[4718]: I1124 08:37:17.040334 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4670f36d-465f-4160-b879-a46b98836cc5-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4c9xc\" (UID: \"4670f36d-465f-4160-b879-a46b98836cc5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" Nov 24 08:37:17 crc kubenswrapper[4718]: I1124 08:37:17.049566 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=35.049547506 podStartE2EDuration="35.049547506s" podCreationTimestamp="2025-11-24 08:36:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:17.049256009 +0000 UTC m=+109.165546913" watchObservedRunningTime="2025-11-24 08:37:17.049547506 +0000 UTC m=+109.165838410" Nov 24 08:37:17 crc kubenswrapper[4718]: I1124 08:37:17.052178 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4670f36d-465f-4160-b879-a46b98836cc5-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4c9xc\" (UID: \"4670f36d-465f-4160-b879-a46b98836cc5\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" Nov 24 08:37:17 crc kubenswrapper[4718]: I1124 08:37:17.063963 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=89.063946126 podStartE2EDuration="1m29.063946126s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:17.063870865 +0000 UTC m=+109.180161789" watchObservedRunningTime="2025-11-24 08:37:17.063946126 +0000 UTC m=+109.180237030" Nov 24 08:37:17 crc kubenswrapper[4718]: I1124 08:37:17.117138 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" Nov 24 08:37:17 crc kubenswrapper[4718]: I1124 08:37:17.159053 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" event={"ID":"4670f36d-465f-4160-b879-a46b98836cc5","Type":"ContainerStarted","Data":"4cb36047ef2349c15b5b7ec4b233d58489dd2d8021e9ee32f3e38ffbcca34de0"} Nov 24 08:37:17 crc kubenswrapper[4718]: I1124 08:37:17.595951 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:17 crc kubenswrapper[4718]: E1124 08:37:17.596126 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:18 crc kubenswrapper[4718]: I1124 08:37:18.164464 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" event={"ID":"4670f36d-465f-4160-b879-a46b98836cc5","Type":"ContainerStarted","Data":"7d9108d5cc1029f9d8593ce6eb0828992f054ef9340ccb7de224f82d18c2b7d6"} Nov 24 08:37:18 crc kubenswrapper[4718]: I1124 08:37:18.184957 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4c9xc" podStartSLOduration=90.184938107 podStartE2EDuration="1m30.184938107s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:18.184473785 +0000 UTC m=+110.300764719" watchObservedRunningTime="2025-11-24 08:37:18.184938107 +0000 UTC m=+110.301229011" Nov 24 08:37:18 crc kubenswrapper[4718]: I1124 08:37:18.596299 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:18 crc kubenswrapper[4718]: I1124 08:37:18.596391 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:18 crc kubenswrapper[4718]: I1124 08:37:18.596455 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:18 crc kubenswrapper[4718]: E1124 08:37:18.597792 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:18 crc kubenswrapper[4718]: E1124 08:37:18.597919 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:18 crc kubenswrapper[4718]: E1124 08:37:18.598100 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:19 crc kubenswrapper[4718]: I1124 08:37:19.595564 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:19 crc kubenswrapper[4718]: E1124 08:37:19.596376 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:20 crc kubenswrapper[4718]: I1124 08:37:20.595885 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:20 crc kubenswrapper[4718]: E1124 08:37:20.596577 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:20 crc kubenswrapper[4718]: I1124 08:37:20.596239 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:20 crc kubenswrapper[4718]: E1124 08:37:20.596874 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:20 crc kubenswrapper[4718]: I1124 08:37:20.596107 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:20 crc kubenswrapper[4718]: E1124 08:37:20.597137 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:21 crc kubenswrapper[4718]: I1124 08:37:21.595654 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:21 crc kubenswrapper[4718]: E1124 08:37:21.595869 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:22 crc kubenswrapper[4718]: I1124 08:37:22.596245 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:22 crc kubenswrapper[4718]: I1124 08:37:22.596314 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:22 crc kubenswrapper[4718]: E1124 08:37:22.596377 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:22 crc kubenswrapper[4718]: E1124 08:37:22.596466 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:22 crc kubenswrapper[4718]: I1124 08:37:22.596645 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:22 crc kubenswrapper[4718]: E1124 08:37:22.596716 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:23 crc kubenswrapper[4718]: I1124 08:37:23.596346 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:23 crc kubenswrapper[4718]: E1124 08:37:23.596726 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:24 crc kubenswrapper[4718]: I1124 08:37:24.596126 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:24 crc kubenswrapper[4718]: E1124 08:37:24.596249 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:24 crc kubenswrapper[4718]: I1124 08:37:24.596271 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:24 crc kubenswrapper[4718]: E1124 08:37:24.596333 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:24 crc kubenswrapper[4718]: I1124 08:37:24.596379 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:24 crc kubenswrapper[4718]: E1124 08:37:24.596729 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:25 crc kubenswrapper[4718]: I1124 08:37:25.189346 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zvlvh_811ba3ee-aad5-427c-84f7-fbd3b78255ec/kube-multus/1.log" Nov 24 08:37:25 crc kubenswrapper[4718]: I1124 08:37:25.189782 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zvlvh_811ba3ee-aad5-427c-84f7-fbd3b78255ec/kube-multus/0.log" Nov 24 08:37:25 crc kubenswrapper[4718]: I1124 08:37:25.189822 4718 generic.go:334] "Generic (PLEG): container finished" podID="811ba3ee-aad5-427c-84f7-fbd3b78255ec" containerID="308a885775330d5e1550c9c740909b7bb1bbff451fa1d5b196af6263de715424" exitCode=1 Nov 24 08:37:25 crc kubenswrapper[4718]: I1124 08:37:25.189854 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zvlvh" event={"ID":"811ba3ee-aad5-427c-84f7-fbd3b78255ec","Type":"ContainerDied","Data":"308a885775330d5e1550c9c740909b7bb1bbff451fa1d5b196af6263de715424"} Nov 24 08:37:25 crc kubenswrapper[4718]: I1124 08:37:25.189885 4718 scope.go:117] "RemoveContainer" containerID="1c7bb0f2597372072cb71d5fe88a7340edaea0e20d5d4ba67780572f1ae07342" Nov 24 08:37:25 crc kubenswrapper[4718]: I1124 08:37:25.190252 4718 scope.go:117] "RemoveContainer" containerID="308a885775330d5e1550c9c740909b7bb1bbff451fa1d5b196af6263de715424" Nov 24 08:37:25 crc kubenswrapper[4718]: E1124 08:37:25.190393 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-zvlvh_openshift-multus(811ba3ee-aad5-427c-84f7-fbd3b78255ec)\"" pod="openshift-multus/multus-zvlvh" podUID="811ba3ee-aad5-427c-84f7-fbd3b78255ec" Nov 24 08:37:25 crc kubenswrapper[4718]: I1124 08:37:25.595925 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:25 crc kubenswrapper[4718]: E1124 08:37:25.596458 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:26 crc kubenswrapper[4718]: I1124 08:37:26.194644 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zvlvh_811ba3ee-aad5-427c-84f7-fbd3b78255ec/kube-multus/1.log" Nov 24 08:37:26 crc kubenswrapper[4718]: I1124 08:37:26.595770 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:26 crc kubenswrapper[4718]: I1124 08:37:26.595773 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:26 crc kubenswrapper[4718]: I1124 08:37:26.595773 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:26 crc kubenswrapper[4718]: E1124 08:37:26.596078 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:26 crc kubenswrapper[4718]: E1124 08:37:26.595943 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:26 crc kubenswrapper[4718]: E1124 08:37:26.596388 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:26 crc kubenswrapper[4718]: I1124 08:37:26.596716 4718 scope.go:117] "RemoveContainer" containerID="14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622" Nov 24 08:37:27 crc kubenswrapper[4718]: I1124 08:37:27.199623 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovnkube-controller/3.log" Nov 24 08:37:27 crc kubenswrapper[4718]: I1124 08:37:27.202067 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerStarted","Data":"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9"} Nov 24 08:37:27 crc kubenswrapper[4718]: I1124 08:37:27.203393 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:37:27 crc kubenswrapper[4718]: I1124 08:37:27.228582 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podStartSLOduration=99.228562088 podStartE2EDuration="1m39.228562088s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:27.228346233 +0000 UTC m=+119.344637137" watchObservedRunningTime="2025-11-24 08:37:27.228562088 +0000 UTC m=+119.344853012" Nov 24 08:37:27 crc kubenswrapper[4718]: I1124 08:37:27.369212 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-ctdmz"] Nov 24 08:37:27 crc kubenswrapper[4718]: I1124 08:37:27.369337 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:27 crc kubenswrapper[4718]: E1124 08:37:27.369425 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:28 crc kubenswrapper[4718]: E1124 08:37:28.547226 4718 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Nov 24 08:37:28 crc kubenswrapper[4718]: I1124 08:37:28.595845 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:28 crc kubenswrapper[4718]: I1124 08:37:28.595920 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:28 crc kubenswrapper[4718]: I1124 08:37:28.595920 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:28 crc kubenswrapper[4718]: E1124 08:37:28.597560 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:28 crc kubenswrapper[4718]: I1124 08:37:28.597569 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:28 crc kubenswrapper[4718]: E1124 08:37:28.597632 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:28 crc kubenswrapper[4718]: E1124 08:37:28.597676 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:28 crc kubenswrapper[4718]: E1124 08:37:28.597700 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:28 crc kubenswrapper[4718]: E1124 08:37:28.691649 4718 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 08:37:30 crc kubenswrapper[4718]: I1124 08:37:30.595894 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:30 crc kubenswrapper[4718]: I1124 08:37:30.595909 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:30 crc kubenswrapper[4718]: I1124 08:37:30.595934 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:30 crc kubenswrapper[4718]: E1124 08:37:30.596147 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:30 crc kubenswrapper[4718]: E1124 08:37:30.596236 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:30 crc kubenswrapper[4718]: I1124 08:37:30.596289 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:30 crc kubenswrapper[4718]: E1124 08:37:30.596335 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:30 crc kubenswrapper[4718]: E1124 08:37:30.596410 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:32 crc kubenswrapper[4718]: I1124 08:37:32.596355 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:32 crc kubenswrapper[4718]: I1124 08:37:32.596355 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:32 crc kubenswrapper[4718]: I1124 08:37:32.596386 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:32 crc kubenswrapper[4718]: E1124 08:37:32.597393 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:32 crc kubenswrapper[4718]: E1124 08:37:32.597250 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:32 crc kubenswrapper[4718]: E1124 08:37:32.597444 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:32 crc kubenswrapper[4718]: I1124 08:37:32.596412 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:32 crc kubenswrapper[4718]: E1124 08:37:32.597504 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:33 crc kubenswrapper[4718]: E1124 08:37:33.693645 4718 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 08:37:34 crc kubenswrapper[4718]: I1124 08:37:34.596284 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:34 crc kubenswrapper[4718]: I1124 08:37:34.596388 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:34 crc kubenswrapper[4718]: I1124 08:37:34.596544 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:34 crc kubenswrapper[4718]: E1124 08:37:34.596650 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:34 crc kubenswrapper[4718]: E1124 08:37:34.596799 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:34 crc kubenswrapper[4718]: E1124 08:37:34.596942 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:34 crc kubenswrapper[4718]: I1124 08:37:34.597163 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:34 crc kubenswrapper[4718]: E1124 08:37:34.597256 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:36 crc kubenswrapper[4718]: I1124 08:37:36.596033 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:36 crc kubenswrapper[4718]: I1124 08:37:36.596129 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:36 crc kubenswrapper[4718]: I1124 08:37:36.596079 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:36 crc kubenswrapper[4718]: I1124 08:37:36.596033 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:36 crc kubenswrapper[4718]: E1124 08:37:36.596325 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:36 crc kubenswrapper[4718]: E1124 08:37:36.596393 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:36 crc kubenswrapper[4718]: E1124 08:37:36.596467 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:36 crc kubenswrapper[4718]: E1124 08:37:36.596546 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:37 crc kubenswrapper[4718]: I1124 08:37:37.648641 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:37:38 crc kubenswrapper[4718]: I1124 08:37:38.595942 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:38 crc kubenswrapper[4718]: I1124 08:37:38.596035 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:38 crc kubenswrapper[4718]: I1124 08:37:38.596133 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:38 crc kubenswrapper[4718]: E1124 08:37:38.596190 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:38 crc kubenswrapper[4718]: I1124 08:37:38.596262 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:38 crc kubenswrapper[4718]: E1124 08:37:38.596453 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:38 crc kubenswrapper[4718]: E1124 08:37:38.596733 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:38 crc kubenswrapper[4718]: I1124 08:37:38.597359 4718 scope.go:117] "RemoveContainer" containerID="308a885775330d5e1550c9c740909b7bb1bbff451fa1d5b196af6263de715424" Nov 24 08:37:38 crc kubenswrapper[4718]: E1124 08:37:38.598140 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:38 crc kubenswrapper[4718]: E1124 08:37:38.694791 4718 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 08:37:39 crc kubenswrapper[4718]: I1124 08:37:39.242267 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zvlvh_811ba3ee-aad5-427c-84f7-fbd3b78255ec/kube-multus/1.log" Nov 24 08:37:39 crc kubenswrapper[4718]: I1124 08:37:39.242339 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zvlvh" event={"ID":"811ba3ee-aad5-427c-84f7-fbd3b78255ec","Type":"ContainerStarted","Data":"d8ca5e7bf3e442fc225cdf16965d3a1960705e567187f8aae5a8e47e781c4a3c"} Nov 24 08:37:40 crc kubenswrapper[4718]: I1124 08:37:40.596070 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:40 crc kubenswrapper[4718]: I1124 08:37:40.596099 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:40 crc kubenswrapper[4718]: I1124 08:37:40.596165 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:40 crc kubenswrapper[4718]: E1124 08:37:40.596194 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:40 crc kubenswrapper[4718]: I1124 08:37:40.596194 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:40 crc kubenswrapper[4718]: E1124 08:37:40.596293 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:40 crc kubenswrapper[4718]: E1124 08:37:40.596368 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:40 crc kubenswrapper[4718]: E1124 08:37:40.596417 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:42 crc kubenswrapper[4718]: I1124 08:37:42.595738 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:42 crc kubenswrapper[4718]: I1124 08:37:42.595822 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:42 crc kubenswrapper[4718]: E1124 08:37:42.595893 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 08:37:42 crc kubenswrapper[4718]: E1124 08:37:42.595999 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ctdmz" podUID="3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97" Nov 24 08:37:42 crc kubenswrapper[4718]: I1124 08:37:42.595759 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:42 crc kubenswrapper[4718]: I1124 08:37:42.596171 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:42 crc kubenswrapper[4718]: E1124 08:37:42.596256 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 08:37:42 crc kubenswrapper[4718]: E1124 08:37:42.597089 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 08:37:44 crc kubenswrapper[4718]: I1124 08:37:44.595508 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:44 crc kubenswrapper[4718]: I1124 08:37:44.595567 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:37:44 crc kubenswrapper[4718]: I1124 08:37:44.595658 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:44 crc kubenswrapper[4718]: I1124 08:37:44.595802 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:44 crc kubenswrapper[4718]: I1124 08:37:44.598211 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Nov 24 08:37:44 crc kubenswrapper[4718]: I1124 08:37:44.598501 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Nov 24 08:37:44 crc kubenswrapper[4718]: I1124 08:37:44.598639 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Nov 24 08:37:44 crc kubenswrapper[4718]: I1124 08:37:44.598689 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Nov 24 08:37:44 crc kubenswrapper[4718]: I1124 08:37:44.599176 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Nov 24 08:37:44 crc kubenswrapper[4718]: I1124 08:37:44.599441 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.785674 4718 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.819287 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-n9vxz"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.819727 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.819883 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.820702 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.822207 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-dwl5h"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.822684 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.825049 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.826455 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.826913 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.827277 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.828799 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.829143 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.829290 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.829454 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7hkr4"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.829640 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.829755 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.829767 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.829876 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.832828 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mb78g"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.833294 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qq48b"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.833498 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.833813 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.834135 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mb78g" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.834348 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qq48b" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.836280 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.836285 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.836546 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.836851 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.837022 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.837083 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.836960 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.837170 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.837243 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.837746 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-65qt8"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.838279 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.841812 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.841923 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.842358 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.842528 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.842625 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.842700 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.842701 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.847091 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.848047 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.848227 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.848510 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.850412 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.850638 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.850703 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.850910 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.851170 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.854727 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xjcpp"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.866502 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.867451 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Nov 24 
08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.867926 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-x2j5v"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.868232 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.868505 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.869781 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-fw72r"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.870181 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-sqk2g"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.870504 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-sqk2g" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.870689 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.870874 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.870878 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-fw72r" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.870986 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.871074 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.871115 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.871254 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.871264 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.871365 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.871425 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.871458 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.871531 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.871603 4718 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"openshift-service-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.871693 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.871736 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.871746 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.871784 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.871820 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.871847 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.871941 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.871988 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.872057 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.872457 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.873315 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.875920 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.876442 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.876910 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.882316 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-p6k5z"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.882916 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.882930 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.883439 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Nov 24 08:37:47 crc 
kubenswrapper[4718]: I1124 08:37:47.883524 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.883565 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.883643 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.883714 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.883747 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.883753 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.883826 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.883859 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.884057 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-p6k5z" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.884082 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.884105 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.884187 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.884237 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.884304 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.884410 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.884461 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.884640 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.884861 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.885114 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p"] Nov 24 08:37:47 crc kubenswrapper[4718]: 
I1124 08:37:47.885683 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.886144 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.886388 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-hq4j2"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.887005 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.887117 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.887260 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.887675 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.889374 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.889490 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.889642 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cd9xl"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.890188 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-jrkxw"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.890292 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.890730 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.890898 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.890962 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-cd9xl" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.891831 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-r2v9t"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.892198 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.892551 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.893598 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.893748 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.893909 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.894147 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.894155 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.906557 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vb4dp"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.907223 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.907841 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cqqjk"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.908581 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cqqjk" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.909248 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vb4dp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.909433 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.911734 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.912567 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/68ad885b-9cc5-4361-877d-bdf7e1934c7e-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mb78g\" (UID: \"68ad885b-9cc5-4361-877d-bdf7e1934c7e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mb78g" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.912605 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-qq48b\" (UID: \"fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qq48b" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.912631 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.912652 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b5d227e-1a6b-466b-b380-1e5f7d407e0f-config\") pod \"machine-api-operator-5694c8668f-7hkr4\" (UID: \"4b5d227e-1a6b-466b-b380-1e5f7d407e0f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.912672 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/575d6e01-e969-4c4b-8e9b-20f68bfc57e9-config\") pod \"machine-approver-56656f9798-fc2mt\" (UID: \"575d6e01-e969-4c4b-8e9b-20f68bfc57e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.912692 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7bb84203-369a-468c-9b00-c4a5650b88c8-audit-dir\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.912711 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-node-pullsecrets\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.912737 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/b3032dfa-2d47-4afb-870f-244eeace9aa2-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.912758 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24sxf\" (UniqueName: \"kubernetes.io/projected/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-kube-api-access-24sxf\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.912802 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/4b5d227e-1a6b-466b-b380-1e5f7d407e0f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7hkr4\" (UID: \"4b5d227e-1a6b-466b-b380-1e5f7d407e0f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.912868 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.912898 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913041 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-audit\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913203 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gt6wn\" (UniqueName: \"kubernetes.io/projected/f1adaa2b-e2cf-412a-8e38-ceb15ba12637-kube-api-access-gt6wn\") pod \"authentication-operator-69f744f599-65qt8\" (UID: \"f1adaa2b-e2cf-412a-8e38-ceb15ba12637\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913226 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b17e4ccd-f166-4933-99ec-ef4a0445ef30-console-config\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913250 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-config\") pod \"controller-manager-879f6c89f-n9vxz\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913293 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qqcl\" (UniqueName: \"kubernetes.io/projected/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-kube-api-access-8qqcl\") pod \"controller-manager-879f6c89f-n9vxz\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913312 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f1adaa2b-e2cf-412a-8e38-ceb15ba12637-serving-cert\") pod \"authentication-operator-69f744f599-65qt8\" (UID: \"f1adaa2b-e2cf-412a-8e38-ceb15ba12637\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913365 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913393 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913411 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jhtm\" (UniqueName: \"kubernetes.io/projected/575d6e01-e969-4c4b-8e9b-20f68bfc57e9-kube-api-access-6jhtm\") pod \"machine-approver-56656f9798-fc2mt\" (UID: \"575d6e01-e969-4c4b-8e9b-20f68bfc57e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913431 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b17e4ccd-f166-4933-99ec-ef4a0445ef30-service-ca\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913458 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b17e4ccd-f166-4933-99ec-ef4a0445ef30-console-oauth-config\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913486 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6djj\" 
(UniqueName: \"kubernetes.io/projected/b17e4ccd-f166-4933-99ec-ef4a0445ef30-kube-api-access-d6djj\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913512 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w449h\" (UniqueName: \"kubernetes.io/projected/fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d-kube-api-access-w449h\") pod \"openshift-apiserver-operator-796bbdcf4f-qq48b\" (UID: \"fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qq48b" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913539 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913574 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-client-ca\") pod \"controller-manager-879f6c89f-n9vxz\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913592 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913616 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b3032dfa-2d47-4afb-870f-244eeace9aa2-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913755 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-config\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913803 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4b5d227e-1a6b-466b-b380-1e5f7d407e0f-images\") pod \"machine-api-operator-5694c8668f-7hkr4\" (UID: \"4b5d227e-1a6b-466b-b380-1e5f7d407e0f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913858 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/b3032dfa-2d47-4afb-870f-244eeace9aa2-etcd-client\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913881 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-audit-dir\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.913914 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cwl5\" (UniqueName: \"kubernetes.io/projected/0b3da8f2-2160-4e82-94fa-a44757b4a481-kube-api-access-2cwl5\") pod \"openshift-config-operator-7777fb866f-sk6hx\" (UID: \"0b3da8f2-2160-4e82-94fa-a44757b4a481\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.914030 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-audit-policies\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.914063 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.914091 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-image-import-ca\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.916239 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.917132 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1adaa2b-e2cf-412a-8e38-ceb15ba12637-config\") pod \"authentication-operator-69f744f599-65qt8\" (UID: \"f1adaa2b-e2cf-412a-8e38-ceb15ba12637\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.917280 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b3032dfa-2d47-4afb-870f-244eeace9aa2-audit-policies\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.917332 4718 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b3da8f2-2160-4e82-94fa-a44757b4a481-serving-cert\") pod \"openshift-config-operator-7777fb866f-sk6hx\" (UID: \"0b3da8f2-2160-4e82-94fa-a44757b4a481\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.917383 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.917428 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-n9vxz\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.917459 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b17e4ccd-f166-4933-99ec-ef4a0445ef30-trusted-ca-bundle\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.917478 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.917499 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b3032dfa-2d47-4afb-870f-244eeace9aa2-encryption-config\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.917585 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.917623 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d76b2\" (UniqueName: \"kubernetes.io/projected/b3032dfa-2d47-4afb-870f-244eeace9aa2-kube-api-access-d76b2\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.926444 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.929096 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.931013 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.920071 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btwkr\" (UniqueName: \"kubernetes.io/projected/7bb84203-369a-468c-9b00-c4a5650b88c8-kube-api-access-btwkr\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.932668 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f1adaa2b-e2cf-412a-8e38-ceb15ba12637-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-65qt8\" (UID: \"f1adaa2b-e2cf-412a-8e38-ceb15ba12637\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.932690 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-etcd-client\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.932711 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-qq48b\" (UID: \"fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qq48b" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.932733 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.932753 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-trusted-ca-bundle\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.932774 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f1adaa2b-e2cf-412a-8e38-ceb15ba12637-service-ca-bundle\") pod \"authentication-operator-69f744f599-65qt8\" (UID: \"f1adaa2b-e2cf-412a-8e38-ceb15ba12637\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" Nov 24 
08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.932809 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5mdq\" (UniqueName: \"kubernetes.io/projected/68ad885b-9cc5-4361-877d-bdf7e1934c7e-kube-api-access-l5mdq\") pod \"cluster-samples-operator-665b6dd947-mb78g\" (UID: \"68ad885b-9cc5-4361-877d-bdf7e1934c7e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mb78g" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.932832 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-etcd-serving-ca\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.932853 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/575d6e01-e969-4c4b-8e9b-20f68bfc57e9-auth-proxy-config\") pod \"machine-approver-56656f9798-fc2mt\" (UID: \"575d6e01-e969-4c4b-8e9b-20f68bfc57e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.932873 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-encryption-config\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.932888 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b17e4ccd-f166-4933-99ec-ef4a0445ef30-console-serving-cert\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.932898 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.933152 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fgz8k"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.933723 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-fgz8k" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.932906 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnbbx\" (UniqueName: \"kubernetes.io/projected/4b5d227e-1a6b-466b-b380-1e5f7d407e0f-kube-api-access-gnbbx\") pod \"machine-api-operator-5694c8668f-7hkr4\" (UID: \"4b5d227e-1a6b-466b-b380-1e5f7d407e0f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.933947 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/575d6e01-e969-4c4b-8e9b-20f68bfc57e9-machine-approver-tls\") pod \"machine-approver-56656f9798-fc2mt\" (UID: \"575d6e01-e969-4c4b-8e9b-20f68bfc57e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.933967 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b17e4ccd-f166-4933-99ec-ef4a0445ef30-oauth-serving-cert\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.934007 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/0b3da8f2-2160-4e82-94fa-a44757b4a481-available-featuregates\") pod \"openshift-config-operator-7777fb866f-sk6hx\" (UID: \"0b3da8f2-2160-4e82-94fa-a44757b4a481\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.934027 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3032dfa-2d47-4afb-870f-244eeace9aa2-serving-cert\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.934042 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b3032dfa-2d47-4afb-870f-244eeace9aa2-audit-dir\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.934057 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-serving-cert\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.934072 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-serving-cert\") pod \"controller-manager-879f6c89f-n9vxz\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 
08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.939804 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.940386 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.940936 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.941399 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.943387 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.944123 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8rtrx"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.944889 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.945512 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.950885 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.951540 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-s27jw"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.951817 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.952100 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.952469 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-v4765"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.952741 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-s27jw" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.952820 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.953070 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8rtrx" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.953126 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.953170 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.953132 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j77qb"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.953342 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.954109 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.954508 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.956871 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v4765" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.959110 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.959801 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-n9vxz"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.959896 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.963208 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kzq5s"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.963801 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kzq5s" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.964592 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z4pwz"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.965046 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j77qb" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.965132 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.965269 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.966325 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.967023 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.967302 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mb78g"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.969245 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-fw72r"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.973453 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qq48b"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.974800 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-65qt8"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.977633 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xjcpp"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.977921 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-x2j5v"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.983281 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.989615 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-dwl5h"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.991479 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.993522 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-c6s86"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.997060 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-s27jw"] Nov 24 08:37:47 crc kubenswrapper[4718]: I1124 08:37:47.997244 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-c6s86" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.003545 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.006121 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fgz8k"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.013427 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-p6k5z"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.016962 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.024264 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cd9xl"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.024415 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-r2v9t"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.031104 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.031163 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.032019 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.032632 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-sqk2g"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.037053 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cwl5\" (UniqueName: \"kubernetes.io/projected/0b3da8f2-2160-4e82-94fa-a44757b4a481-kube-api-access-2cwl5\") pod \"openshift-config-operator-7777fb866f-sk6hx\" (UID: \"0b3da8f2-2160-4e82-94fa-a44757b4a481\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.037337 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da69e256-e90c-4fde-94f6-d08522e2f3da-config-volume\") pod \"collect-profiles-29399550-lxjc5\" (UID: \"da69e256-e90c-4fde-94f6-d08522e2f3da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.037400 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/da69e256-e90c-4fde-94f6-d08522e2f3da-secret-volume\") pod \"collect-profiles-29399550-lxjc5\" (UID: \"da69e256-e90c-4fde-94f6-d08522e2f3da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.037424 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a79c2f3-4280-485b-80ea-239298e165f3-config\") pod 
\"openshift-controller-manager-operator-756b6f6bc6-p6k5z\" (UID: \"8a79c2f3-4280-485b-80ea-239298e165f3\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-p6k5z" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.037478 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98d95fdd-1d35-40d5-bb23-7c605ea5e392-config\") pod \"kube-apiserver-operator-766d6c64bb-8rtrx\" (UID: \"98d95fdd-1d35-40d5-bb23-7c605ea5e392\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8rtrx" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.037501 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a79c2f3-4280-485b-80ea-239298e165f3-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-p6k5z\" (UID: \"8a79c2f3-4280-485b-80ea-239298e165f3\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-p6k5z" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.037555 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-config\") pod \"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.037583 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1adaa2b-e2cf-412a-8e38-ceb15ba12637-config\") pod \"authentication-operator-69f744f599-65qt8\" (UID: \"f1adaa2b-e2cf-412a-8e38-ceb15ba12637\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.037721 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d8dfe303-3867-4258-854b-c4655768faeb-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fgz8k\" (UID: \"d8dfe303-3867-4258-854b-c4655768faeb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fgz8k" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.037749 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22fwh\" (UniqueName: \"kubernetes.io/projected/f1f4765d-f9d8-4590-99b4-e1e0823424cd-kube-api-access-22fwh\") pod \"control-plane-machine-set-operator-78cbb6b69f-kzq5s\" (UID: \"f1f4765d-f9d8-4590-99b4-e1e0823424cd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kzq5s" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.037889 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.037909 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-z4pwz\" (UID: \"d037ebb5-19ab-471e-b627-3b0487dfa12c\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038006 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b3032dfa-2d47-4afb-870f-244eeace9aa2-encryption-config\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038031 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038050 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/d48d710f-a4a0-402e-b403-58577c79294d-signing-key\") pod \"service-ca-9c57cc56f-s27jw\" (UID: \"d48d710f-a4a0-402e-b403-58577c79294d\") " pod="openshift-service-ca/service-ca-9c57cc56f-s27jw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038224 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d76b2\" (UniqueName: \"kubernetes.io/projected/b3032dfa-2d47-4afb-870f-244eeace9aa2-kube-api-access-d76b2\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038355 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f1adaa2b-e2cf-412a-8e38-ceb15ba12637-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-65qt8\" (UID: \"f1adaa2b-e2cf-412a-8e38-ceb15ba12637\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038386 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a43d8c55-d59d-4c9e-9da5-6b333f0916a8-serving-cert\") pod \"service-ca-operator-777779d784-ktfkp\" (UID: \"a43d8c55-d59d-4c9e-9da5-6b333f0916a8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038413 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-config\") pod \"route-controller-manager-6576b87f9c-nmrnw\" (UID: \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038437 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk8zp\" (UniqueName: \"kubernetes.io/projected/9d95a4c1-33fb-4dcd-83eb-5d4e0c666651-kube-api-access-pk8zp\") pod 
\"olm-operator-6b444d44fb-bfsfw\" (UID: \"9d95a4c1-33fb-4dcd-83eb-5d4e0c666651\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038452 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1adaa2b-e2cf-412a-8e38-ceb15ba12637-config\") pod \"authentication-operator-69f744f599-65qt8\" (UID: \"f1adaa2b-e2cf-412a-8e38-ceb15ba12637\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038460 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8db19a4a-1b83-42c6-8c28-82d91a923903-auth-proxy-config\") pod \"machine-config-operator-74547568cd-lg22l\" (UID: \"8db19a4a-1b83-42c6-8c28-82d91a923903\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038531 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-etcd-ca\") pod \"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038566 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-etcd-serving-ca\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038592 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f1adaa2b-e2cf-412a-8e38-ceb15ba12637-service-ca-bundle\") pod \"authentication-operator-69f744f599-65qt8\" (UID: \"f1adaa2b-e2cf-412a-8e38-ceb15ba12637\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038616 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-tmpfs\") pod \"packageserver-d55dfcdfc-pn2ck\" (UID: \"8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038638 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/be8de0a9-f2bf-4b42-8117-6c31ee72abeb-bound-sa-token\") pod \"ingress-operator-5b745b69d9-rphbq\" (UID: \"be8de0a9-f2bf-4b42-8117-6c31ee72abeb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038674 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/575d6e01-e969-4c4b-8e9b-20f68bfc57e9-auth-proxy-config\") pod \"machine-approver-56656f9798-fc2mt\" (UID: \"575d6e01-e969-4c4b-8e9b-20f68bfc57e9\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038698 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cj5wx\" (UniqueName: \"kubernetes.io/projected/d48d710f-a4a0-402e-b403-58577c79294d-kube-api-access-cj5wx\") pod \"service-ca-9c57cc56f-s27jw\" (UID: \"d48d710f-a4a0-402e-b403-58577c79294d\") " pod="openshift-service-ca/service-ca-9c57cc56f-s27jw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038721 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/98d95fdd-1d35-40d5-bb23-7c605ea5e392-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8rtrx\" (UID: \"98d95fdd-1d35-40d5-bb23-7c605ea5e392\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8rtrx" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038742 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cdcx\" (UniqueName: \"kubernetes.io/projected/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-kube-api-access-7cdcx\") pod \"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038769 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b17e4ccd-f166-4933-99ec-ef4a0445ef30-console-serving-cert\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038791 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b3032dfa-2d47-4afb-870f-244eeace9aa2-audit-dir\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038815 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnbbx\" (UniqueName: \"kubernetes.io/projected/4b5d227e-1a6b-466b-b380-1e5f7d407e0f-kube-api-access-gnbbx\") pod \"machine-api-operator-5694c8668f-7hkr4\" (UID: \"4b5d227e-1a6b-466b-b380-1e5f7d407e0f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038838 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/575d6e01-e969-4c4b-8e9b-20f68bfc57e9-machine-approver-tls\") pod \"machine-approver-56656f9798-fc2mt\" (UID: \"575d6e01-e969-4c4b-8e9b-20f68bfc57e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038863 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/52e5d554-be8b-4312-a58c-67be24e6d340-srv-cert\") pod \"catalog-operator-68c6474976-jdktz\" (UID: \"52e5d554-be8b-4312-a58c-67be24e6d340\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038886 4718 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/be8de0a9-f2bf-4b42-8117-6c31ee72abeb-metrics-tls\") pod \"ingress-operator-5b745b69d9-rphbq\" (UID: \"be8de0a9-f2bf-4b42-8117-6c31ee72abeb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038908 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3032dfa-2d47-4afb-870f-244eeace9aa2-serving-cert\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038931 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbkgh\" (UniqueName: \"kubernetes.io/projected/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-kube-api-access-dbkgh\") pod \"packageserver-d55dfcdfc-pn2ck\" (UID: \"8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038953 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-apiservice-cert\") pod \"packageserver-d55dfcdfc-pn2ck\" (UID: \"8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.038996 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-qq48b\" (UID: \"fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qq48b" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039023 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039067 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-node-pullsecrets\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039088 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b5d227e-1a6b-466b-b380-1e5f7d407e0f-config\") pod \"machine-api-operator-5694c8668f-7hkr4\" (UID: \"4b5d227e-1a6b-466b-b380-1e5f7d407e0f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039113 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fs6zk\" (UniqueName: 
\"kubernetes.io/projected/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-kube-api-access-fs6zk\") pod \"route-controller-manager-6576b87f9c-nmrnw\" (UID: \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039136 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdd4cfba-19fe-4e2d-bf3a-12732fcad83c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cqqjk\" (UID: \"cdd4cfba-19fe-4e2d-bf3a-12732fcad83c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cqqjk" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039265 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-etcd-client\") pod \"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039294 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/4b5d227e-1a6b-466b-b380-1e5f7d407e0f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7hkr4\" (UID: \"4b5d227e-1a6b-466b-b380-1e5f7d407e0f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039318 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/47cf3417-8f00-44ea-82ca-5d60401f3754-stats-auth\") pod \"router-default-5444994796-jrkxw\" (UID: \"47cf3417-8f00-44ea-82ca-5d60401f3754\") " pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039343 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24sxf\" (UniqueName: \"kubernetes.io/projected/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-kube-api-access-24sxf\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039370 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039397 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039421 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46kgn\" (UniqueName: 
\"kubernetes.io/projected/33484a8a-7b9c-4faa-901a-666830edd1f1-kube-api-access-46kgn\") pod \"console-operator-58897d9998-sqk2g\" (UID: \"33484a8a-7b9c-4faa-901a-666830edd1f1\") " pod="openshift-console-operator/console-operator-58897d9998-sqk2g" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039448 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9d95a4c1-33fb-4dcd-83eb-5d4e0c666651-profile-collector-cert\") pod \"olm-operator-6b444d44fb-bfsfw\" (UID: \"9d95a4c1-33fb-4dcd-83eb-5d4e0c666651\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039473 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-audit\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039498 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gt6wn\" (UniqueName: \"kubernetes.io/projected/f1adaa2b-e2cf-412a-8e38-ceb15ba12637-kube-api-access-gt6wn\") pod \"authentication-operator-69f744f599-65qt8\" (UID: \"f1adaa2b-e2cf-412a-8e38-ceb15ba12637\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039521 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed31fc96-34c7-4136-94fb-e2a0a41e0589-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6cxd\" (UID: \"ed31fc96-34c7-4136-94fb-e2a0a41e0589\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039555 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-client-ca\") pod \"route-controller-manager-6576b87f9c-nmrnw\" (UID: \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039582 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vt2wq\" (UniqueName: \"kubernetes.io/projected/c0272bee-0b74-4b1b-80c4-213d866d6479-kube-api-access-vt2wq\") pod \"migrator-59844c95c7-v4765\" (UID: \"c0272bee-0b74-4b1b-80c4-213d866d6479\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v4765" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039620 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f1adaa2b-e2cf-412a-8e38-ceb15ba12637-serving-cert\") pod \"authentication-operator-69f744f599-65qt8\" (UID: \"f1adaa2b-e2cf-412a-8e38-ceb15ba12637\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039647 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2vzl\" (UniqueName: 
\"kubernetes.io/projected/8a79c2f3-4280-485b-80ea-239298e165f3-kube-api-access-m2vzl\") pod \"openshift-controller-manager-operator-756b6f6bc6-p6k5z\" (UID: \"8a79c2f3-4280-485b-80ea-239298e165f3\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-p6k5z" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039669 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33484a8a-7b9c-4faa-901a-666830edd1f1-serving-cert\") pod \"console-operator-58897d9998-sqk2g\" (UID: \"33484a8a-7b9c-4faa-901a-666830edd1f1\") " pod="openshift-console-operator/console-operator-58897d9998-sqk2g" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039695 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jhtm\" (UniqueName: \"kubernetes.io/projected/575d6e01-e969-4c4b-8e9b-20f68bfc57e9-kube-api-access-6jhtm\") pod \"machine-approver-56656f9798-fc2mt\" (UID: \"575d6e01-e969-4c4b-8e9b-20f68bfc57e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039718 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/47cf3417-8f00-44ea-82ca-5d60401f3754-service-ca-bundle\") pod \"router-default-5444994796-jrkxw\" (UID: \"47cf3417-8f00-44ea-82ca-5d60401f3754\") " pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039744 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/98d95fdd-1d35-40d5-bb23-7c605ea5e392-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-8rtrx\" (UID: \"98d95fdd-1d35-40d5-bb23-7c605ea5e392\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8rtrx" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039769 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w449h\" (UniqueName: \"kubernetes.io/projected/fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d-kube-api-access-w449h\") pod \"openshift-apiserver-operator-796bbdcf4f-qq48b\" (UID: \"fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qq48b" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039791 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a43d8c55-d59d-4c9e-9da5-6b333f0916a8-config\") pod \"service-ca-operator-777779d784-ktfkp\" (UID: \"a43d8c55-d59d-4c9e-9da5-6b333f0916a8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039814 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/021efb9a-dc27-4590-b85f-9d8be1dac72a-proxy-tls\") pod \"machine-config-controller-84d6567774-tk2jz\" (UID: \"021efb9a-dc27-4590-b85f-9d8be1dac72a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039838 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-ss9zd\" (UniqueName: \"kubernetes.io/projected/ed31fc96-34c7-4136-94fb-e2a0a41e0589-kube-api-access-ss9zd\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6cxd\" (UID: \"ed31fc96-34c7-4136-94fb-e2a0a41e0589\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039864 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-494qt\" (UniqueName: \"kubernetes.io/projected/d037ebb5-19ab-471e-b627-3b0487dfa12c-kube-api-access-494qt\") pod \"marketplace-operator-79b997595-z4pwz\" (UID: \"d037ebb5-19ab-471e-b627-3b0487dfa12c\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039892 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039918 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039941 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bf4m\" (UniqueName: \"kubernetes.io/projected/8db19a4a-1b83-42c6-8c28-82d91a923903-kube-api-access-7bf4m\") pod \"machine-config-operator-74547568cd-lg22l\" (UID: \"8db19a4a-1b83-42c6-8c28-82d91a923903\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.039983 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cdd4cfba-19fe-4e2d-bf3a-12732fcad83c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cqqjk\" (UID: \"cdd4cfba-19fe-4e2d-bf3a-12732fcad83c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cqqjk" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040011 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b3032dfa-2d47-4afb-870f-244eeace9aa2-etcd-client\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040032 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b3032dfa-2d47-4afb-870f-244eeace9aa2-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040054 4718 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-config\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040076 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bc24762a-782b-43e4-a603-d3db1f587e02-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pz29p\" (UID: \"bc24762a-782b-43e4-a603-d3db1f587e02\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040098 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/47cf3417-8f00-44ea-82ca-5d60401f3754-metrics-certs\") pod \"router-default-5444994796-jrkxw\" (UID: \"47cf3417-8f00-44ea-82ca-5d60401f3754\") " pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040135 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-serving-cert\") pod \"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040152 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w66r9\" (UniqueName: \"kubernetes.io/projected/47cf3417-8f00-44ea-82ca-5d60401f3754-kube-api-access-w66r9\") pod \"router-default-5444994796-jrkxw\" (UID: \"47cf3417-8f00-44ea-82ca-5d60401f3754\") " pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040174 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-audit-dir\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040199 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040214 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-image-import-ca\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040232 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33484a8a-7b9c-4faa-901a-666830edd1f1-config\") pod \"console-operator-58897d9998-sqk2g\" (UID: 
\"33484a8a-7b9c-4faa-901a-666830edd1f1\") " pod="openshift-console-operator/console-operator-58897d9998-sqk2g" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040248 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-audit-policies\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040265 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b3032dfa-2d47-4afb-870f-244eeace9aa2-audit-policies\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040280 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b3da8f2-2160-4e82-94fa-a44757b4a481-serving-cert\") pod \"openshift-config-operator-7777fb866f-sk6hx\" (UID: \"0b3da8f2-2160-4e82-94fa-a44757b4a481\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040300 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-n9vxz\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040319 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b17e4ccd-f166-4933-99ec-ef4a0445ef30-trusted-ca-bundle\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040334 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-webhook-cert\") pod \"packageserver-d55dfcdfc-pn2ck\" (UID: \"8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040352 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-serving-cert\") pod \"route-controller-manager-6576b87f9c-nmrnw\" (UID: \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040368 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btwkr\" (UniqueName: \"kubernetes.io/projected/7bb84203-369a-468c-9b00-c4a5650b88c8-kube-api-access-btwkr\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040386 4718 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bc24762a-782b-43e4-a603-d3db1f587e02-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-pz29p\" (UID: \"bc24762a-782b-43e4-a603-d3db1f587e02\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040405 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/454295a5-405c-4698-9fc0-0fda6968cc99-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-vb4dp\" (UID: \"454295a5-405c-4698-9fc0-0fda6968cc99\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vb4dp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040420 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/454295a5-405c-4698-9fc0-0fda6968cc99-config\") pod \"kube-controller-manager-operator-78b949d7b-vb4dp\" (UID: \"454295a5-405c-4698-9fc0-0fda6968cc99\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vb4dp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040436 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n55k4\" (UniqueName: \"kubernetes.io/projected/fddc4425-bbc6-4907-a27a-d76661f26708-kube-api-access-n55k4\") pod \"dns-operator-744455d44c-cd9xl\" (UID: \"fddc4425-bbc6-4907-a27a-d76661f26708\") " pod="openshift-dns-operator/dns-operator-744455d44c-cd9xl" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040475 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-etcd-client\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040501 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-qq48b\" (UID: \"fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qq48b" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040523 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/d48d710f-a4a0-402e-b403-58577c79294d-signing-cabundle\") pod \"service-ca-9c57cc56f-s27jw\" (UID: \"d48d710f-a4a0-402e-b403-58577c79294d\") " pod="openshift-service-ca/service-ca-9c57cc56f-s27jw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040538 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/47cf3417-8f00-44ea-82ca-5d60401f3754-default-certificate\") pod \"router-default-5444994796-jrkxw\" (UID: \"47cf3417-8f00-44ea-82ca-5d60401f3754\") " pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040581 4718 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040604 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-trusted-ca-bundle\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040626 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/454295a5-405c-4698-9fc0-0fda6968cc99-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-vb4dp\" (UID: \"454295a5-405c-4698-9fc0-0fda6968cc99\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vb4dp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040645 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed31fc96-34c7-4136-94fb-e2a0a41e0589-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6cxd\" (UID: \"ed31fc96-34c7-4136-94fb-e2a0a41e0589\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040671 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5mdq\" (UniqueName: \"kubernetes.io/projected/68ad885b-9cc5-4361-877d-bdf7e1934c7e-kube-api-access-l5mdq\") pod \"cluster-samples-operator-665b6dd947-mb78g\" (UID: \"68ad885b-9cc5-4361-877d-bdf7e1934c7e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mb78g" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040691 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/fddc4425-bbc6-4907-a27a-d76661f26708-metrics-tls\") pod \"dns-operator-744455d44c-cd9xl\" (UID: \"fddc4425-bbc6-4907-a27a-d76661f26708\") " pod="openshift-dns-operator/dns-operator-744455d44c-cd9xl" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040716 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwrq7\" (UniqueName: \"kubernetes.io/projected/bc24762a-782b-43e4-a603-d3db1f587e02-kube-api-access-pwrq7\") pod \"cluster-image-registry-operator-dc59b4c8b-pz29p\" (UID: \"bc24762a-782b-43e4-a603-d3db1f587e02\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040734 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-encryption-config\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040811 4718 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b17e4ccd-f166-4933-99ec-ef4a0445ef30-oauth-serving-cert\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040844 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/0b3da8f2-2160-4e82-94fa-a44757b4a481-available-featuregates\") pod \"openshift-config-operator-7777fb866f-sk6hx\" (UID: \"0b3da8f2-2160-4e82-94fa-a44757b4a481\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040920 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-serving-cert\") pod \"controller-manager-879f6c89f-n9vxz\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.040991 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-serving-cert\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041014 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/68ad885b-9cc5-4361-877d-bdf7e1934c7e-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mb78g\" (UID: \"68ad885b-9cc5-4361-877d-bdf7e1934c7e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mb78g" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041031 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/8db19a4a-1b83-42c6-8c28-82d91a923903-images\") pod \"machine-config-operator-74547568cd-lg22l\" (UID: \"8db19a4a-1b83-42c6-8c28-82d91a923903\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041047 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8db19a4a-1b83-42c6-8c28-82d91a923903-proxy-tls\") pod \"machine-config-operator-74547568cd-lg22l\" (UID: \"8db19a4a-1b83-42c6-8c28-82d91a923903\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041064 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdd4cfba-19fe-4e2d-bf3a-12732fcad83c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cqqjk\" (UID: \"cdd4cfba-19fe-4e2d-bf3a-12732fcad83c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cqqjk" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041083 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/7bb84203-369a-468c-9b00-c4a5650b88c8-audit-dir\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041101 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/575d6e01-e969-4c4b-8e9b-20f68bfc57e9-config\") pod \"machine-approver-56656f9798-fc2mt\" (UID: \"575d6e01-e969-4c4b-8e9b-20f68bfc57e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041131 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b3032dfa-2d47-4afb-870f-244eeace9aa2-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041148 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9d95a4c1-33fb-4dcd-83eb-5d4e0c666651-srv-cert\") pod \"olm-operator-6b444d44fb-bfsfw\" (UID: \"9d95a4c1-33fb-4dcd-83eb-5d4e0c666651\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041166 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bc24762a-782b-43e4-a603-d3db1f587e02-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-pz29p\" (UID: \"bc24762a-782b-43e4-a603-d3db1f587e02\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041218 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5666\" (UniqueName: \"kubernetes.io/projected/021efb9a-dc27-4590-b85f-9d8be1dac72a-kube-api-access-x5666\") pod \"machine-config-controller-84d6567774-tk2jz\" (UID: \"021efb9a-dc27-4590-b85f-9d8be1dac72a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041237 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-config\") pod \"controller-manager-879f6c89f-n9vxz\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041254 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qqcl\" (UniqueName: \"kubernetes.io/projected/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-kube-api-access-8qqcl\") pod \"controller-manager-879f6c89f-n9vxz\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041270 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b17e4ccd-f166-4933-99ec-ef4a0445ef30-console-config\") pod \"console-f9d7485db-x2j5v\" (UID: 
\"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041287 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/021efb9a-dc27-4590-b85f-9d8be1dac72a-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-tk2jz\" (UID: \"021efb9a-dc27-4590-b85f-9d8be1dac72a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041304 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmvsd\" (UniqueName: \"kubernetes.io/projected/d8dfe303-3867-4258-854b-c4655768faeb-kube-api-access-bmvsd\") pod \"multus-admission-controller-857f4d67dd-fgz8k\" (UID: \"d8dfe303-3867-4258-854b-c4655768faeb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fgz8k" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041321 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/33484a8a-7b9c-4faa-901a-666830edd1f1-trusted-ca\") pod \"console-operator-58897d9998-sqk2g\" (UID: \"33484a8a-7b9c-4faa-901a-666830edd1f1\") " pod="openshift-console-operator/console-operator-58897d9998-sqk2g" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041339 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/52e5d554-be8b-4312-a58c-67be24e6d340-profile-collector-cert\") pod \"catalog-operator-68c6474976-jdktz\" (UID: \"52e5d554-be8b-4312-a58c-67be24e6d340\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041357 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f1f4765d-f9d8-4590-99b4-e1e0823424cd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kzq5s\" (UID: \"f1f4765d-f9d8-4590-99b4-e1e0823424cd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kzq5s" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041377 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041394 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbzgh\" (UniqueName: \"kubernetes.io/projected/15854168-726d-44b5-80e7-d1ca941c2941-kube-api-access-rbzgh\") pod \"downloads-7954f5f757-fw72r\" (UID: \"15854168-726d-44b5-80e7-d1ca941c2941\") " pod="openshift-console/downloads-7954f5f757-fw72r" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041409 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-etcd-service-ca\") pod \"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041426 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b17e4ccd-f166-4933-99ec-ef4a0445ef30-service-ca\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041451 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041479 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b17e4ccd-f166-4933-99ec-ef4a0445ef30-console-oauth-config\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041497 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lfr5\" (UniqueName: \"kubernetes.io/projected/52e5d554-be8b-4312-a58c-67be24e6d340-kube-api-access-8lfr5\") pod \"catalog-operator-68c6474976-jdktz\" (UID: \"52e5d554-be8b-4312-a58c-67be24e6d340\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041513 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-z4pwz\" (UID: \"d037ebb5-19ab-471e-b627-3b0487dfa12c\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041535 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6djj\" (UniqueName: \"kubernetes.io/projected/b17e4ccd-f166-4933-99ec-ef4a0445ef30-kube-api-access-d6djj\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041554 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8slcq\" (UniqueName: \"kubernetes.io/projected/da69e256-e90c-4fde-94f6-d08522e2f3da-kube-api-access-8slcq\") pod \"collect-profiles-29399550-lxjc5\" (UID: \"da69e256-e90c-4fde-94f6-d08522e2f3da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041570 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmm84\" (UniqueName: \"kubernetes.io/projected/be8de0a9-f2bf-4b42-8117-6c31ee72abeb-kube-api-access-nmm84\") pod \"ingress-operator-5b745b69d9-rphbq\" 
(UID: \"be8de0a9-f2bf-4b42-8117-6c31ee72abeb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041589 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kt5m\" (UniqueName: \"kubernetes.io/projected/a43d8c55-d59d-4c9e-9da5-6b333f0916a8-kube-api-access-4kt5m\") pod \"service-ca-operator-777779d784-ktfkp\" (UID: \"a43d8c55-d59d-4c9e-9da5-6b333f0916a8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041620 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-client-ca\") pod \"controller-manager-879f6c89f-n9vxz\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041638 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4b5d227e-1a6b-466b-b380-1e5f7d407e0f-images\") pod \"machine-api-operator-5694c8668f-7hkr4\" (UID: \"4b5d227e-1a6b-466b-b380-1e5f7d407e0f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.041653 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/be8de0a9-f2bf-4b42-8117-6c31ee72abeb-trusted-ca\") pod \"ingress-operator-5b745b69d9-rphbq\" (UID: \"be8de0a9-f2bf-4b42-8117-6c31ee72abeb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.042291 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-etcd-serving-ca\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.042328 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.042373 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cqqjk"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.042734 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f1adaa2b-e2cf-412a-8e38-ceb15ba12637-service-ca-bundle\") pod \"authentication-operator-69f744f599-65qt8\" (UID: \"f1adaa2b-e2cf-412a-8e38-ceb15ba12637\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.043215 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/575d6e01-e969-4c4b-8e9b-20f68bfc57e9-auth-proxy-config\") pod \"machine-approver-56656f9798-fc2mt\" (UID: \"575d6e01-e969-4c4b-8e9b-20f68bfc57e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.043587 4718 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8rtrx"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.044902 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f1adaa2b-e2cf-412a-8e38-ceb15ba12637-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-65qt8\" (UID: \"f1adaa2b-e2cf-412a-8e38-ceb15ba12637\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.045254 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-n9vxz\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.045315 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b3032dfa-2d47-4afb-870f-244eeace9aa2-audit-dir\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.045523 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.047909 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.048088 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b3032dfa-2d47-4afb-870f-244eeace9aa2-encryption-config\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.048219 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vb4dp"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.048307 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-v4765"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.048412 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7bb84203-369a-468c-9b00-c4a5650b88c8-audit-dir\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.048917 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/575d6e01-e969-4c4b-8e9b-20f68bfc57e9-config\") pod \"machine-approver-56656f9798-fc2mt\" (UID: \"575d6e01-e969-4c4b-8e9b-20f68bfc57e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" Nov 24 08:37:48 crc 
kubenswrapper[4718]: I1124 08:37:48.049273 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3032dfa-2d47-4afb-870f-244eeace9aa2-serving-cert\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.049388 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.049556 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b3032dfa-2d47-4afb-870f-244eeace9aa2-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.049805 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b17e4ccd-f166-4933-99ec-ef4a0445ef30-trusted-ca-bundle\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.051306 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/575d6e01-e969-4c4b-8e9b-20f68bfc57e9-machine-approver-tls\") pod \"machine-approver-56656f9798-fc2mt\" (UID: \"575d6e01-e969-4c4b-8e9b-20f68bfc57e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.051536 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-config\") pod \"controller-manager-879f6c89f-n9vxz\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.052498 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b17e4ccd-f166-4933-99ec-ef4a0445ef30-console-config\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.052607 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-etcd-client\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.052850 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.053263 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-qq48b\" (UID: \"fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qq48b" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.053670 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.055200 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-node-pullsecrets\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.055305 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.055703 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-trusted-ca-bundle\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.056075 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b3032dfa-2d47-4afb-870f-244eeace9aa2-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.056363 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.056535 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-audit-dir\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.056606 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b5d227e-1a6b-466b-b380-1e5f7d407e0f-config\") pod \"machine-api-operator-5694c8668f-7hkr4\" (UID: \"4b5d227e-1a6b-466b-b380-1e5f7d407e0f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.057047 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-audit\") pod \"apiserver-76f77b778f-dwl5h\" 
(UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.057646 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b3032dfa-2d47-4afb-870f-244eeace9aa2-etcd-client\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.057795 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-image-import-ca\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.057941 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-qq48b\" (UID: \"fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qq48b" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.058293 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.058466 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.058881 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b17e4ccd-f166-4933-99ec-ef4a0445ef30-oauth-serving-cert\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.058987 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b3032dfa-2d47-4afb-870f-244eeace9aa2-audit-policies\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.059299 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b17e4ccd-f166-4933-99ec-ef4a0445ef30-console-serving-cert\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.059331 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: 
\"kubernetes.io/empty-dir/0b3da8f2-2160-4e82-94fa-a44757b4a481-available-featuregates\") pod \"openshift-config-operator-7777fb866f-sk6hx\" (UID: \"0b3da8f2-2160-4e82-94fa-a44757b4a481\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.060019 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4b5d227e-1a6b-466b-b380-1e5f7d407e0f-images\") pod \"machine-api-operator-5694c8668f-7hkr4\" (UID: \"4b5d227e-1a6b-466b-b380-1e5f7d407e0f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.060534 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-config\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.060585 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kzq5s"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.061318 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-client-ca\") pod \"controller-manager-879f6c89f-n9vxz\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.062056 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.062734 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b3da8f2-2160-4e82-94fa-a44757b4a481-serving-cert\") pod \"openshift-config-operator-7777fb866f-sk6hx\" (UID: \"0b3da8f2-2160-4e82-94fa-a44757b4a481\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.063107 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.063323 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f1adaa2b-e2cf-412a-8e38-ceb15ba12637-serving-cert\") pod \"authentication-operator-69f744f599-65qt8\" (UID: \"f1adaa2b-e2cf-412a-8e38-ceb15ba12637\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.063455 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-encryption-config\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.063735 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-audit-policies\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.063884 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b17e4ccd-f166-4933-99ec-ef4a0445ef30-service-ca\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.064079 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.064281 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-serving-cert\") pod \"controller-manager-879f6c89f-n9vxz\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.064805 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.064934 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b17e4ccd-f166-4933-99ec-ef4a0445ef30-console-oauth-config\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.065002 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/4b5d227e-1a6b-466b-b380-1e5f7d407e0f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7hkr4\" (UID: \"4b5d227e-1a6b-466b-b380-1e5f7d407e0f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.065828 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/68ad885b-9cc5-4361-877d-bdf7e1934c7e-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mb78g\" (UID: \"68ad885b-9cc5-4361-877d-bdf7e1934c7e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mb78g" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.065894 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.067142 4718 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.067318 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.067844 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-serving-cert\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.070006 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-c6s86"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.070249 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.070577 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.072211 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-s8r6x"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.073817 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.074413 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7hkr4"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.076305 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.078511 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.080745 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-vmh47"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.081610 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vmh47" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.083508 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.085145 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-hq4j2"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.087796 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.089667 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vmh47"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.091682 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-s8r6x"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.093741 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z4pwz"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.095424 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j77qb"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.097840 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-k278s"] Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.098395 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-k278s" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.102898 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.122694 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.141894 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.142166 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33484a8a-7b9c-4faa-901a-666830edd1f1-config\") pod \"console-operator-58897d9998-sqk2g\" (UID: \"33484a8a-7b9c-4faa-901a-666830edd1f1\") " pod="openshift-console-operator/console-operator-58897d9998-sqk2g" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.142198 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-webhook-cert\") pod \"packageserver-d55dfcdfc-pn2ck\" (UID: \"8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.142217 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-serving-cert\") pod \"route-controller-manager-6576b87f9c-nmrnw\" (UID: \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:37:48 crc 
kubenswrapper[4718]: I1124 08:37:48.142239 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/454295a5-405c-4698-9fc0-0fda6968cc99-config\") pod \"kube-controller-manager-operator-78b949d7b-vb4dp\" (UID: \"454295a5-405c-4698-9fc0-0fda6968cc99\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vb4dp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.142258 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n55k4\" (UniqueName: \"kubernetes.io/projected/fddc4425-bbc6-4907-a27a-d76661f26708-kube-api-access-n55k4\") pod \"dns-operator-744455d44c-cd9xl\" (UID: \"fddc4425-bbc6-4907-a27a-d76661f26708\") " pod="openshift-dns-operator/dns-operator-744455d44c-cd9xl" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.142275 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bc24762a-782b-43e4-a603-d3db1f587e02-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-pz29p\" (UID: \"bc24762a-782b-43e4-a603-d3db1f587e02\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.142668 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/454295a5-405c-4698-9fc0-0fda6968cc99-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-vb4dp\" (UID: \"454295a5-405c-4698-9fc0-0fda6968cc99\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vb4dp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.142694 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/47cf3417-8f00-44ea-82ca-5d60401f3754-default-certificate\") pod \"router-default-5444994796-jrkxw\" (UID: \"47cf3417-8f00-44ea-82ca-5d60401f3754\") " pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.142710 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/d48d710f-a4a0-402e-b403-58577c79294d-signing-cabundle\") pod \"service-ca-9c57cc56f-s27jw\" (UID: \"d48d710f-a4a0-402e-b403-58577c79294d\") " pod="openshift-service-ca/service-ca-9c57cc56f-s27jw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.142724 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/454295a5-405c-4698-9fc0-0fda6968cc99-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-vb4dp\" (UID: \"454295a5-405c-4698-9fc0-0fda6968cc99\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vb4dp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.142762 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed31fc96-34c7-4136-94fb-e2a0a41e0589-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6cxd\" (UID: \"ed31fc96-34c7-4136-94fb-e2a0a41e0589\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd" Nov 24 08:37:48 crc kubenswrapper[4718]: 
I1124 08:37:48.142784 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/fddc4425-bbc6-4907-a27a-d76661f26708-metrics-tls\") pod \"dns-operator-744455d44c-cd9xl\" (UID: \"fddc4425-bbc6-4907-a27a-d76661f26708\") " pod="openshift-dns-operator/dns-operator-744455d44c-cd9xl" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.142801 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwrq7\" (UniqueName: \"kubernetes.io/projected/bc24762a-782b-43e4-a603-d3db1f587e02-kube-api-access-pwrq7\") pod \"cluster-image-registry-operator-dc59b4c8b-pz29p\" (UID: \"bc24762a-782b-43e4-a603-d3db1f587e02\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.142820 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/8db19a4a-1b83-42c6-8c28-82d91a923903-images\") pod \"machine-config-operator-74547568cd-lg22l\" (UID: \"8db19a4a-1b83-42c6-8c28-82d91a923903\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.142858 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8db19a4a-1b83-42c6-8c28-82d91a923903-proxy-tls\") pod \"machine-config-operator-74547568cd-lg22l\" (UID: \"8db19a4a-1b83-42c6-8c28-82d91a923903\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.142877 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdd4cfba-19fe-4e2d-bf3a-12732fcad83c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cqqjk\" (UID: \"cdd4cfba-19fe-4e2d-bf3a-12732fcad83c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cqqjk" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.142901 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9d95a4c1-33fb-4dcd-83eb-5d4e0c666651-srv-cert\") pod \"olm-operator-6b444d44fb-bfsfw\" (UID: \"9d95a4c1-33fb-4dcd-83eb-5d4e0c666651\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.142951 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bc24762a-782b-43e4-a603-d3db1f587e02-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-pz29p\" (UID: \"bc24762a-782b-43e4-a603-d3db1f587e02\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.142992 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5666\" (UniqueName: \"kubernetes.io/projected/021efb9a-dc27-4590-b85f-9d8be1dac72a-kube-api-access-x5666\") pod \"machine-config-controller-84d6567774-tk2jz\" (UID: \"021efb9a-dc27-4590-b85f-9d8be1dac72a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.143013 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/021efb9a-dc27-4590-b85f-9d8be1dac72a-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-tk2jz\" (UID: \"021efb9a-dc27-4590-b85f-9d8be1dac72a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.143051 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33484a8a-7b9c-4faa-901a-666830edd1f1-config\") pod \"console-operator-58897d9998-sqk2g\" (UID: \"33484a8a-7b9c-4faa-901a-666830edd1f1\") " pod="openshift-console-operator/console-operator-58897d9998-sqk2g" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.143933 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/021efb9a-dc27-4590-b85f-9d8be1dac72a-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-tk2jz\" (UID: \"021efb9a-dc27-4590-b85f-9d8be1dac72a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.144025 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bc24762a-782b-43e4-a603-d3db1f587e02-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-pz29p\" (UID: \"bc24762a-782b-43e4-a603-d3db1f587e02\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.144080 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmvsd\" (UniqueName: \"kubernetes.io/projected/d8dfe303-3867-4258-854b-c4655768faeb-kube-api-access-bmvsd\") pod \"multus-admission-controller-857f4d67dd-fgz8k\" (UID: \"d8dfe303-3867-4258-854b-c4655768faeb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fgz8k" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.144128 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/52e5d554-be8b-4312-a58c-67be24e6d340-profile-collector-cert\") pod \"catalog-operator-68c6474976-jdktz\" (UID: \"52e5d554-be8b-4312-a58c-67be24e6d340\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.144146 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/33484a8a-7b9c-4faa-901a-666830edd1f1-trusted-ca\") pod \"console-operator-58897d9998-sqk2g\" (UID: \"33484a8a-7b9c-4faa-901a-666830edd1f1\") " pod="openshift-console-operator/console-operator-58897d9998-sqk2g" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.144167 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f1f4765d-f9d8-4590-99b4-e1e0823424cd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kzq5s\" (UID: \"f1f4765d-f9d8-4590-99b4-e1e0823424cd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kzq5s" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.144264 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-etcd-service-ca\") pod \"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.144826 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-etcd-service-ca\") pod \"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.145010 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/33484a8a-7b9c-4faa-901a-666830edd1f1-trusted-ca\") pod \"console-operator-58897d9998-sqk2g\" (UID: \"33484a8a-7b9c-4faa-901a-666830edd1f1\") " pod="openshift-console-operator/console-operator-58897d9998-sqk2g" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.145528 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbzgh\" (UniqueName: \"kubernetes.io/projected/15854168-726d-44b5-80e7-d1ca941c2941-kube-api-access-rbzgh\") pod \"downloads-7954f5f757-fw72r\" (UID: \"15854168-726d-44b5-80e7-d1ca941c2941\") " pod="openshift-console/downloads-7954f5f757-fw72r" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.145580 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lfr5\" (UniqueName: \"kubernetes.io/projected/52e5d554-be8b-4312-a58c-67be24e6d340-kube-api-access-8lfr5\") pod \"catalog-operator-68c6474976-jdktz\" (UID: \"52e5d554-be8b-4312-a58c-67be24e6d340\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.145599 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-z4pwz\" (UID: \"d037ebb5-19ab-471e-b627-3b0487dfa12c\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.145638 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/47cf3417-8f00-44ea-82ca-5d60401f3754-default-certificate\") pod \"router-default-5444994796-jrkxw\" (UID: \"47cf3417-8f00-44ea-82ca-5d60401f3754\") " pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.145731 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kt5m\" (UniqueName: \"kubernetes.io/projected/a43d8c55-d59d-4c9e-9da5-6b333f0916a8-kube-api-access-4kt5m\") pod \"service-ca-operator-777779d784-ktfkp\" (UID: \"a43d8c55-d59d-4c9e-9da5-6b333f0916a8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.145755 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8slcq\" (UniqueName: \"kubernetes.io/projected/da69e256-e90c-4fde-94f6-d08522e2f3da-kube-api-access-8slcq\") pod \"collect-profiles-29399550-lxjc5\" (UID: \"da69e256-e90c-4fde-94f6-d08522e2f3da\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.145787 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmm84\" (UniqueName: \"kubernetes.io/projected/be8de0a9-f2bf-4b42-8117-6c31ee72abeb-kube-api-access-nmm84\") pod \"ingress-operator-5b745b69d9-rphbq\" (UID: \"be8de0a9-f2bf-4b42-8117-6c31ee72abeb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.145823 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/be8de0a9-f2bf-4b42-8117-6c31ee72abeb-trusted-ca\") pod \"ingress-operator-5b745b69d9-rphbq\" (UID: \"be8de0a9-f2bf-4b42-8117-6c31ee72abeb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.145861 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a79c2f3-4280-485b-80ea-239298e165f3-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-p6k5z\" (UID: \"8a79c2f3-4280-485b-80ea-239298e165f3\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-p6k5z" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.145887 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da69e256-e90c-4fde-94f6-d08522e2f3da-config-volume\") pod \"collect-profiles-29399550-lxjc5\" (UID: \"da69e256-e90c-4fde-94f6-d08522e2f3da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.145904 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/da69e256-e90c-4fde-94f6-d08522e2f3da-secret-volume\") pod \"collect-profiles-29399550-lxjc5\" (UID: \"da69e256-e90c-4fde-94f6-d08522e2f3da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.145921 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98d95fdd-1d35-40d5-bb23-7c605ea5e392-config\") pod \"kube-apiserver-operator-766d6c64bb-8rtrx\" (UID: \"98d95fdd-1d35-40d5-bb23-7c605ea5e392\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8rtrx" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.145936 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a79c2f3-4280-485b-80ea-239298e165f3-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-p6k5z\" (UID: \"8a79c2f3-4280-485b-80ea-239298e165f3\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-p6k5z" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.145953 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-config\") pod \"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.145984 4718 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22fwh\" (UniqueName: \"kubernetes.io/projected/f1f4765d-f9d8-4590-99b4-e1e0823424cd-kube-api-access-22fwh\") pod \"control-plane-machine-set-operator-78cbb6b69f-kzq5s\" (UID: \"f1f4765d-f9d8-4590-99b4-e1e0823424cd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kzq5s" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146002 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d8dfe303-3867-4258-854b-c4655768faeb-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fgz8k\" (UID: \"d8dfe303-3867-4258-854b-c4655768faeb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fgz8k" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146022 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-z4pwz\" (UID: \"d037ebb5-19ab-471e-b627-3b0487dfa12c\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146037 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/d48d710f-a4a0-402e-b403-58577c79294d-signing-key\") pod \"service-ca-9c57cc56f-s27jw\" (UID: \"d48d710f-a4a0-402e-b403-58577c79294d\") " pod="openshift-service-ca/service-ca-9c57cc56f-s27jw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146056 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a43d8c55-d59d-4c9e-9da5-6b333f0916a8-serving-cert\") pod \"service-ca-operator-777779d784-ktfkp\" (UID: \"a43d8c55-d59d-4c9e-9da5-6b333f0916a8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146080 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-config\") pod \"route-controller-manager-6576b87f9c-nmrnw\" (UID: \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146098 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk8zp\" (UniqueName: \"kubernetes.io/projected/9d95a4c1-33fb-4dcd-83eb-5d4e0c666651-kube-api-access-pk8zp\") pod \"olm-operator-6b444d44fb-bfsfw\" (UID: \"9d95a4c1-33fb-4dcd-83eb-5d4e0c666651\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146117 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8db19a4a-1b83-42c6-8c28-82d91a923903-auth-proxy-config\") pod \"machine-config-operator-74547568cd-lg22l\" (UID: \"8db19a4a-1b83-42c6-8c28-82d91a923903\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146134 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: 
\"kubernetes.io/configmap/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-etcd-ca\") pod \"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146151 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/be8de0a9-f2bf-4b42-8117-6c31ee72abeb-bound-sa-token\") pod \"ingress-operator-5b745b69d9-rphbq\" (UID: \"be8de0a9-f2bf-4b42-8117-6c31ee72abeb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146175 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-tmpfs\") pod \"packageserver-d55dfcdfc-pn2ck\" (UID: \"8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146193 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/98d95fdd-1d35-40d5-bb23-7c605ea5e392-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8rtrx\" (UID: \"98d95fdd-1d35-40d5-bb23-7c605ea5e392\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8rtrx" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146209 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cdcx\" (UniqueName: \"kubernetes.io/projected/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-kube-api-access-7cdcx\") pod \"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146227 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cj5wx\" (UniqueName: \"kubernetes.io/projected/d48d710f-a4a0-402e-b403-58577c79294d-kube-api-access-cj5wx\") pod \"service-ca-9c57cc56f-s27jw\" (UID: \"d48d710f-a4a0-402e-b403-58577c79294d\") " pod="openshift-service-ca/service-ca-9c57cc56f-s27jw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146244 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/52e5d554-be8b-4312-a58c-67be24e6d340-srv-cert\") pod \"catalog-operator-68c6474976-jdktz\" (UID: \"52e5d554-be8b-4312-a58c-67be24e6d340\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146258 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/be8de0a9-f2bf-4b42-8117-6c31ee72abeb-metrics-tls\") pod \"ingress-operator-5b745b69d9-rphbq\" (UID: \"be8de0a9-f2bf-4b42-8117-6c31ee72abeb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146282 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-apiservice-cert\") pod \"packageserver-d55dfcdfc-pn2ck\" (UID: \"8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 
08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146300 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbkgh\" (UniqueName: \"kubernetes.io/projected/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-kube-api-access-dbkgh\") pod \"packageserver-d55dfcdfc-pn2ck\" (UID: \"8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146321 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdd4cfba-19fe-4e2d-bf3a-12732fcad83c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cqqjk\" (UID: \"cdd4cfba-19fe-4e2d-bf3a-12732fcad83c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cqqjk" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146336 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-etcd-client\") pod \"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146355 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fs6zk\" (UniqueName: \"kubernetes.io/projected/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-kube-api-access-fs6zk\") pod \"route-controller-manager-6576b87f9c-nmrnw\" (UID: \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146377 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/47cf3417-8f00-44ea-82ca-5d60401f3754-stats-auth\") pod \"router-default-5444994796-jrkxw\" (UID: \"47cf3417-8f00-44ea-82ca-5d60401f3754\") " pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146391 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9d95a4c1-33fb-4dcd-83eb-5d4e0c666651-profile-collector-cert\") pod \"olm-operator-6b444d44fb-bfsfw\" (UID: \"9d95a4c1-33fb-4dcd-83eb-5d4e0c666651\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146410 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46kgn\" (UniqueName: \"kubernetes.io/projected/33484a8a-7b9c-4faa-901a-666830edd1f1-kube-api-access-46kgn\") pod \"console-operator-58897d9998-sqk2g\" (UID: \"33484a8a-7b9c-4faa-901a-666830edd1f1\") " pod="openshift-console-operator/console-operator-58897d9998-sqk2g" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146442 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed31fc96-34c7-4136-94fb-e2a0a41e0589-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6cxd\" (UID: \"ed31fc96-34c7-4136-94fb-e2a0a41e0589\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146459 4718 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-client-ca\") pod \"route-controller-manager-6576b87f9c-nmrnw\" (UID: \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146478 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vt2wq\" (UniqueName: \"kubernetes.io/projected/c0272bee-0b74-4b1b-80c4-213d866d6479-kube-api-access-vt2wq\") pod \"migrator-59844c95c7-v4765\" (UID: \"c0272bee-0b74-4b1b-80c4-213d866d6479\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v4765" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146494 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2vzl\" (UniqueName: \"kubernetes.io/projected/8a79c2f3-4280-485b-80ea-239298e165f3-kube-api-access-m2vzl\") pod \"openshift-controller-manager-operator-756b6f6bc6-p6k5z\" (UID: \"8a79c2f3-4280-485b-80ea-239298e165f3\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-p6k5z" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146546 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33484a8a-7b9c-4faa-901a-666830edd1f1-serving-cert\") pod \"console-operator-58897d9998-sqk2g\" (UID: \"33484a8a-7b9c-4faa-901a-666830edd1f1\") " pod="openshift-console-operator/console-operator-58897d9998-sqk2g" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146568 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/47cf3417-8f00-44ea-82ca-5d60401f3754-service-ca-bundle\") pod \"router-default-5444994796-jrkxw\" (UID: \"47cf3417-8f00-44ea-82ca-5d60401f3754\") " pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146587 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/98d95fdd-1d35-40d5-bb23-7c605ea5e392-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-8rtrx\" (UID: \"98d95fdd-1d35-40d5-bb23-7c605ea5e392\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8rtrx" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146613 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a43d8c55-d59d-4c9e-9da5-6b333f0916a8-config\") pod \"service-ca-operator-777779d784-ktfkp\" (UID: \"a43d8c55-d59d-4c9e-9da5-6b333f0916a8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146637 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/021efb9a-dc27-4590-b85f-9d8be1dac72a-proxy-tls\") pod \"machine-config-controller-84d6567774-tk2jz\" (UID: \"021efb9a-dc27-4590-b85f-9d8be1dac72a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146662 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ss9zd\" (UniqueName: 
\"kubernetes.io/projected/ed31fc96-34c7-4136-94fb-e2a0a41e0589-kube-api-access-ss9zd\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6cxd\" (UID: \"ed31fc96-34c7-4136-94fb-e2a0a41e0589\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146683 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-494qt\" (UniqueName: \"kubernetes.io/projected/d037ebb5-19ab-471e-b627-3b0487dfa12c-kube-api-access-494qt\") pod \"marketplace-operator-79b997595-z4pwz\" (UID: \"d037ebb5-19ab-471e-b627-3b0487dfa12c\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146706 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bf4m\" (UniqueName: \"kubernetes.io/projected/8db19a4a-1b83-42c6-8c28-82d91a923903-kube-api-access-7bf4m\") pod \"machine-config-operator-74547568cd-lg22l\" (UID: \"8db19a4a-1b83-42c6-8c28-82d91a923903\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146725 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cdd4cfba-19fe-4e2d-bf3a-12732fcad83c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cqqjk\" (UID: \"cdd4cfba-19fe-4e2d-bf3a-12732fcad83c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cqqjk" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146752 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bc24762a-782b-43e4-a603-d3db1f587e02-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pz29p\" (UID: \"bc24762a-782b-43e4-a603-d3db1f587e02\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146770 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/47cf3417-8f00-44ea-82ca-5d60401f3754-metrics-certs\") pod \"router-default-5444994796-jrkxw\" (UID: \"47cf3417-8f00-44ea-82ca-5d60401f3754\") " pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146788 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-serving-cert\") pod \"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146806 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w66r9\" (UniqueName: \"kubernetes.io/projected/47cf3417-8f00-44ea-82ca-5d60401f3754-kube-api-access-w66r9\") pod \"router-default-5444994796-jrkxw\" (UID: \"47cf3417-8f00-44ea-82ca-5d60401f3754\") " pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146829 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-config\") pod 
\"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.146837 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a79c2f3-4280-485b-80ea-239298e165f3-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-p6k5z\" (UID: \"8a79c2f3-4280-485b-80ea-239298e165f3\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-p6k5z" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.147068 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-etcd-ca\") pod \"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.147517 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/47cf3417-8f00-44ea-82ca-5d60401f3754-service-ca-bundle\") pod \"router-default-5444994796-jrkxw\" (UID: \"47cf3417-8f00-44ea-82ca-5d60401f3754\") " pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.147987 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-tmpfs\") pod \"packageserver-d55dfcdfc-pn2ck\" (UID: \"8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.148091 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8db19a4a-1b83-42c6-8c28-82d91a923903-auth-proxy-config\") pod \"machine-config-operator-74547568cd-lg22l\" (UID: \"8db19a4a-1b83-42c6-8c28-82d91a923903\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.148844 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bc24762a-782b-43e4-a603-d3db1f587e02-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-pz29p\" (UID: \"bc24762a-782b-43e4-a603-d3db1f587e02\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.149751 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-serving-cert\") pod \"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.149955 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33484a8a-7b9c-4faa-901a-666830edd1f1-serving-cert\") pod \"console-operator-58897d9998-sqk2g\" (UID: \"33484a8a-7b9c-4faa-901a-666830edd1f1\") " pod="openshift-console-operator/console-operator-58897d9998-sqk2g" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.150151 4718 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a79c2f3-4280-485b-80ea-239298e165f3-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-p6k5z\" (UID: \"8a79c2f3-4280-485b-80ea-239298e165f3\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-p6k5z" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.150188 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-etcd-client\") pod \"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.150566 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/47cf3417-8f00-44ea-82ca-5d60401f3754-metrics-certs\") pod \"router-default-5444994796-jrkxw\" (UID: \"47cf3417-8f00-44ea-82ca-5d60401f3754\") " pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.151149 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/47cf3417-8f00-44ea-82ca-5d60401f3754-stats-auth\") pod \"router-default-5444994796-jrkxw\" (UID: \"47cf3417-8f00-44ea-82ca-5d60401f3754\") " pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.162908 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.181935 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.185703 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/fddc4425-bbc6-4907-a27a-d76661f26708-metrics-tls\") pod \"dns-operator-744455d44c-cd9xl\" (UID: \"fddc4425-bbc6-4907-a27a-d76661f26708\") " pod="openshift-dns-operator/dns-operator-744455d44c-cd9xl" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.204494 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.222923 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.242167 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.262206 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.282702 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.302796 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.321964 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" 
Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.329993 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/021efb9a-dc27-4590-b85f-9d8be1dac72a-proxy-tls\") pod \"machine-config-controller-84d6567774-tk2jz\" (UID: \"021efb9a-dc27-4590-b85f-9d8be1dac72a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.342253 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.363012 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.370837 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdd4cfba-19fe-4e2d-bf3a-12732fcad83c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cqqjk\" (UID: \"cdd4cfba-19fe-4e2d-bf3a-12732fcad83c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cqqjk" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.382753 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.383495 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdd4cfba-19fe-4e2d-bf3a-12732fcad83c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cqqjk\" (UID: \"cdd4cfba-19fe-4e2d-bf3a-12732fcad83c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cqqjk" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.422771 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.435620 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/454295a5-405c-4698-9fc0-0fda6968cc99-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-vb4dp\" (UID: \"454295a5-405c-4698-9fc0-0fda6968cc99\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vb4dp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.442519 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.462242 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.463207 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/454295a5-405c-4698-9fc0-0fda6968cc99-config\") pod \"kube-controller-manager-operator-78b949d7b-vb4dp\" (UID: \"454295a5-405c-4698-9fc0-0fda6968cc99\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vb4dp" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.482373 4718 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.502171 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.522330 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.525882 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-serving-cert\") pod \"route-controller-manager-6576b87f9c-nmrnw\" (UID: \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.543585 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.563043 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.569132 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d8dfe303-3867-4258-854b-c4655768faeb-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fgz8k\" (UID: \"d8dfe303-3867-4258-854b-c4655768faeb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fgz8k" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.582802 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.602880 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.608376 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-client-ca\") pod \"route-controller-manager-6576b87f9c-nmrnw\" (UID: \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.622760 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.628300 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-config\") pod \"route-controller-manager-6576b87f9c-nmrnw\" (UID: \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.642673 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.661932 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Nov 24 08:37:48 crc 
kubenswrapper[4718]: I1124 08:37:48.682723 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.701696 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.714856 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/52e5d554-be8b-4312-a58c-67be24e6d340-srv-cert\") pod \"catalog-operator-68c6474976-jdktz\" (UID: \"52e5d554-be8b-4312-a58c-67be24e6d340\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.721751 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.727442 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/52e5d554-be8b-4312-a58c-67be24e6d340-profile-collector-cert\") pod \"catalog-operator-68c6474976-jdktz\" (UID: \"52e5d554-be8b-4312-a58c-67be24e6d340\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.730187 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/da69e256-e90c-4fde-94f6-d08522e2f3da-secret-volume\") pod \"collect-profiles-29399550-lxjc5\" (UID: \"da69e256-e90c-4fde-94f6-d08522e2f3da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.730928 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9d95a4c1-33fb-4dcd-83eb-5d4e0c666651-profile-collector-cert\") pod \"olm-operator-6b444d44fb-bfsfw\" (UID: \"9d95a4c1-33fb-4dcd-83eb-5d4e0c666651\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.743842 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.762782 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.782164 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.802659 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.810843 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/be8de0a9-f2bf-4b42-8117-6c31ee72abeb-metrics-tls\") pod \"ingress-operator-5b745b69d9-rphbq\" (UID: \"be8de0a9-f2bf-4b42-8117-6c31ee72abeb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.827662 4718 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress-operator"/"trusted-ca" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.838078 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/be8de0a9-f2bf-4b42-8117-6c31ee72abeb-trusted-ca\") pod \"ingress-operator-5b745b69d9-rphbq\" (UID: \"be8de0a9-f2bf-4b42-8117-6c31ee72abeb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.842838 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.862340 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.864062 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/d48d710f-a4a0-402e-b403-58577c79294d-signing-cabundle\") pod \"service-ca-9c57cc56f-s27jw\" (UID: \"d48d710f-a4a0-402e-b403-58577c79294d\") " pod="openshift-service-ca/service-ca-9c57cc56f-s27jw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.884388 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.901718 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.922637 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.926891 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da69e256-e90c-4fde-94f6-d08522e2f3da-config-volume\") pod \"collect-profiles-29399550-lxjc5\" (UID: \"da69e256-e90c-4fde-94f6-d08522e2f3da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.942407 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.949458 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/d48d710f-a4a0-402e-b403-58577c79294d-signing-key\") pod \"service-ca-9c57cc56f-s27jw\" (UID: \"d48d710f-a4a0-402e-b403-58577c79294d\") " pod="openshift-service-ca/service-ca-9c57cc56f-s27jw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.961158 4718 request.go:700] Waited for 1.00778492s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/secrets?fieldSelector=metadata.name%3Dolm-operator-serving-cert&limit=500&resourceVersion=0 Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.962315 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.965891 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9d95a4c1-33fb-4dcd-83eb-5d4e0c666651-srv-cert\") pod \"olm-operator-6b444d44fb-bfsfw\" 
(UID: \"9d95a4c1-33fb-4dcd-83eb-5d4e0c666651\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" Nov 24 08:37:48 crc kubenswrapper[4718]: I1124 08:37:48.982189 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.002213 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.023828 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.031046 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/98d95fdd-1d35-40d5-bb23-7c605ea5e392-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-8rtrx\" (UID: \"98d95fdd-1d35-40d5-bb23-7c605ea5e392\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8rtrx" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.043220 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.047330 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98d95fdd-1d35-40d5-bb23-7c605ea5e392-config\") pod \"kube-apiserver-operator-766d6c64bb-8rtrx\" (UID: \"98d95fdd-1d35-40d5-bb23-7c605ea5e392\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8rtrx" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.065024 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.074459 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/8db19a4a-1b83-42c6-8c28-82d91a923903-images\") pod \"machine-config-operator-74547568cd-lg22l\" (UID: \"8db19a4a-1b83-42c6-8c28-82d91a923903\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.082327 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.102452 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.122777 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.142245 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.142444 4718 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/packageserver-service-cert: failed to sync secret cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.142522 4718 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-webhook-cert podName:8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1 nodeName:}" failed. No retries permitted until 2025-11-24 08:37:49.64249759 +0000 UTC m=+141.758788494 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-cert" (UniqueName: "kubernetes.io/secret/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-webhook-cert") pod "packageserver-d55dfcdfc-pn2ck" (UID: "8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1") : failed to sync secret cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.143174 4718 secret.go:188] Couldn't get secret openshift-machine-config-operator/mco-proxy-tls: failed to sync secret cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.143197 4718 secret.go:188] Couldn't get secret openshift-kube-storage-version-migrator-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.143232 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8db19a4a-1b83-42c6-8c28-82d91a923903-proxy-tls podName:8db19a4a-1b83-42c6-8c28-82d91a923903 nodeName:}" failed. No retries permitted until 2025-11-24 08:37:49.64321614 +0000 UTC m=+141.759507044 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/8db19a4a-1b83-42c6-8c28-82d91a923903-proxy-tls") pod "machine-config-operator-74547568cd-lg22l" (UID: "8db19a4a-1b83-42c6-8c28-82d91a923903") : failed to sync secret cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.143436 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ed31fc96-34c7-4136-94fb-e2a0a41e0589-serving-cert podName:ed31fc96-34c7-4136-94fb-e2a0a41e0589 nodeName:}" failed. No retries permitted until 2025-11-24 08:37:49.643401236 +0000 UTC m=+141.759692230 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/ed31fc96-34c7-4136-94fb-e2a0a41e0589-serving-cert") pod "kube-storage-version-migrator-operator-b67b599dd-f6cxd" (UID: "ed31fc96-34c7-4136-94fb-e2a0a41e0589") : failed to sync secret cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.145005 4718 secret.go:188] Couldn't get secret openshift-machine-api/control-plane-machine-set-operator-tls: failed to sync secret cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.145065 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f1f4765d-f9d8-4590-99b4-e1e0823424cd-control-plane-machine-set-operator-tls podName:f1f4765d-f9d8-4590-99b4-e1e0823424cd nodeName:}" failed. No retries permitted until 2025-11-24 08:37:49.645049192 +0000 UTC m=+141.761340306 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "control-plane-machine-set-operator-tls" (UniqueName: "kubernetes.io/secret/f1f4765d-f9d8-4590-99b4-e1e0823424cd-control-plane-machine-set-operator-tls") pod "control-plane-machine-set-operator-78cbb6b69f-kzq5s" (UID: "f1f4765d-f9d8-4590-99b4-e1e0823424cd") : failed to sync secret cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.146629 4718 secret.go:188] Couldn't get secret openshift-marketplace/marketplace-operator-metrics: failed to sync secret cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.146653 4718 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/packageserver-service-cert: failed to sync secret cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.146700 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-apiservice-cert podName:8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1 nodeName:}" failed. No retries permitted until 2025-11-24 08:37:49.646687869 +0000 UTC m=+141.762978993 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "apiservice-cert" (UniqueName: "kubernetes.io/secret/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-apiservice-cert") pod "packageserver-d55dfcdfc-pn2ck" (UID: "8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1") : failed to sync secret cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.146716 4718 secret.go:188] Couldn't get secret openshift-service-ca-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.146724 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-operator-metrics podName:d037ebb5-19ab-471e-b627-3b0487dfa12c nodeName:}" failed. No retries permitted until 2025-11-24 08:37:49.646710819 +0000 UTC m=+141.763001963 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-operator-metrics" (UniqueName: "kubernetes.io/secret/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-operator-metrics") pod "marketplace-operator-79b997595-z4pwz" (UID: "d037ebb5-19ab-471e-b627-3b0487dfa12c") : failed to sync secret cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.146755 4718 configmap.go:193] Couldn't get configMap openshift-marketplace/marketplace-trusted-ca: failed to sync configmap cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.146765 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a43d8c55-d59d-4c9e-9da5-6b333f0916a8-serving-cert podName:a43d8c55-d59d-4c9e-9da5-6b333f0916a8 nodeName:}" failed. No retries permitted until 2025-11-24 08:37:49.64675137 +0000 UTC m=+141.763042274 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/a43d8c55-d59d-4c9e-9da5-6b333f0916a8-serving-cert") pod "service-ca-operator-777779d784-ktfkp" (UID: "a43d8c55-d59d-4c9e-9da5-6b333f0916a8") : failed to sync secret cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.146803 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-trusted-ca podName:d037ebb5-19ab-471e-b627-3b0487dfa12c nodeName:}" failed. No retries permitted until 2025-11-24 08:37:49.646782701 +0000 UTC m=+141.763073605 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-trusted-ca" (UniqueName: "kubernetes.io/configmap/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-trusted-ca") pod "marketplace-operator-79b997595-z4pwz" (UID: "d037ebb5-19ab-471e-b627-3b0487dfa12c") : failed to sync configmap cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.147493 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed31fc96-34c7-4136-94fb-e2a0a41e0589-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6cxd\" (UID: \"ed31fc96-34c7-4136-94fb-e2a0a41e0589\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd" Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.147588 4718 configmap.go:193] Couldn't get configMap openshift-service-ca-operator/service-ca-operator-config: failed to sync configmap cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: E1124 08:37:49.148296 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a43d8c55-d59d-4c9e-9da5-6b333f0916a8-config podName:a43d8c55-d59d-4c9e-9da5-6b333f0916a8 nodeName:}" failed. No retries permitted until 2025-11-24 08:37:49.648280244 +0000 UTC m=+141.764571358 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/a43d8c55-d59d-4c9e-9da5-6b333f0916a8-config") pod "service-ca-operator-777779d784-ktfkp" (UID: "a43d8c55-d59d-4c9e-9da5-6b333f0916a8") : failed to sync configmap cache: timed out waiting for the condition Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.161591 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.183566 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.203330 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.223043 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.241958 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.262174 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.282136 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.302908 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.322647 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.342849 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.363309 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.382377 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.402530 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.422627 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.442836 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.462534 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.482048 4718 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.502293 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.522837 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.548443 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.563016 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.582093 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.622427 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.642029 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.662524 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.670548 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8db19a4a-1b83-42c6-8c28-82d91a923903-proxy-tls\") pod \"machine-config-operator-74547568cd-lg22l\" (UID: \"8db19a4a-1b83-42c6-8c28-82d91a923903\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.670624 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f1f4765d-f9d8-4590-99b4-e1e0823424cd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kzq5s\" (UID: \"f1f4765d-f9d8-4590-99b4-e1e0823424cd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kzq5s" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.670656 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-z4pwz\" (UID: \"d037ebb5-19ab-471e-b627-3b0487dfa12c\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.670725 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-z4pwz\" (UID: \"d037ebb5-19ab-471e-b627-3b0487dfa12c\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.670750 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/a43d8c55-d59d-4c9e-9da5-6b333f0916a8-serving-cert\") pod \"service-ca-operator-777779d784-ktfkp\" (UID: \"a43d8c55-d59d-4c9e-9da5-6b333f0916a8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.670812 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-apiservice-cert\") pod \"packageserver-d55dfcdfc-pn2ck\" (UID: \"8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.670921 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a43d8c55-d59d-4c9e-9da5-6b333f0916a8-config\") pod \"service-ca-operator-777779d784-ktfkp\" (UID: \"a43d8c55-d59d-4c9e-9da5-6b333f0916a8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.671022 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-webhook-cert\") pod \"packageserver-d55dfcdfc-pn2ck\" (UID: \"8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.671066 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed31fc96-34c7-4136-94fb-e2a0a41e0589-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6cxd\" (UID: \"ed31fc96-34c7-4136-94fb-e2a0a41e0589\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.672154 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a43d8c55-d59d-4c9e-9da5-6b333f0916a8-config\") pod \"service-ca-operator-777779d784-ktfkp\" (UID: \"a43d8c55-d59d-4c9e-9da5-6b333f0916a8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.672731 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-z4pwz\" (UID: \"d037ebb5-19ab-471e-b627-3b0487dfa12c\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.674138 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f1f4765d-f9d8-4590-99b4-e1e0823424cd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kzq5s\" (UID: \"f1f4765d-f9d8-4590-99b4-e1e0823424cd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kzq5s" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.674191 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed31fc96-34c7-4136-94fb-e2a0a41e0589-serving-cert\") pod 
\"kube-storage-version-migrator-operator-b67b599dd-f6cxd\" (UID: \"ed31fc96-34c7-4136-94fb-e2a0a41e0589\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.674354 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a43d8c55-d59d-4c9e-9da5-6b333f0916a8-serving-cert\") pod \"service-ca-operator-777779d784-ktfkp\" (UID: \"a43d8c55-d59d-4c9e-9da5-6b333f0916a8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.674398 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8db19a4a-1b83-42c6-8c28-82d91a923903-proxy-tls\") pod \"machine-config-operator-74547568cd-lg22l\" (UID: \"8db19a4a-1b83-42c6-8c28-82d91a923903\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.675440 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-apiservice-cert\") pod \"packageserver-d55dfcdfc-pn2ck\" (UID: \"8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.680659 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-z4pwz\" (UID: \"d037ebb5-19ab-471e-b627-3b0487dfa12c\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.683476 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-webhook-cert\") pod \"packageserver-d55dfcdfc-pn2ck\" (UID: \"8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.697329 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cwl5\" (UniqueName: \"kubernetes.io/projected/0b3da8f2-2160-4e82-94fa-a44757b4a481-kube-api-access-2cwl5\") pod \"openshift-config-operator-7777fb866f-sk6hx\" (UID: \"0b3da8f2-2160-4e82-94fa-a44757b4a481\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.716087 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d76b2\" (UniqueName: \"kubernetes.io/projected/b3032dfa-2d47-4afb-870f-244eeace9aa2-kube-api-access-d76b2\") pod \"apiserver-7bbb656c7d-jmjcp\" (UID: \"b3032dfa-2d47-4afb-870f-244eeace9aa2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.733208 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.736785 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnbbx\" (UniqueName: \"kubernetes.io/projected/4b5d227e-1a6b-466b-b380-1e5f7d407e0f-kube-api-access-gnbbx\") pod \"machine-api-operator-5694c8668f-7hkr4\" (UID: \"4b5d227e-1a6b-466b-b380-1e5f7d407e0f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.754777 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.760431 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btwkr\" (UniqueName: \"kubernetes.io/projected/7bb84203-369a-468c-9b00-c4a5650b88c8-kube-api-access-btwkr\") pod \"oauth-openshift-558db77b4-xjcpp\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.767667 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.775862 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jhtm\" (UniqueName: \"kubernetes.io/projected/575d6e01-e969-4c4b-8e9b-20f68bfc57e9-kube-api-access-6jhtm\") pod \"machine-approver-56656f9798-fc2mt\" (UID: \"575d6e01-e969-4c4b-8e9b-20f68bfc57e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.797937 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w449h\" (UniqueName: \"kubernetes.io/projected/fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d-kube-api-access-w449h\") pod \"openshift-apiserver-operator-796bbdcf4f-qq48b\" (UID: \"fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qq48b" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.802100 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qq48b" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.820854 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qqcl\" (UniqueName: \"kubernetes.io/projected/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-kube-api-access-8qqcl\") pod \"controller-manager-879f6c89f-n9vxz\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.841779 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gt6wn\" (UniqueName: \"kubernetes.io/projected/f1adaa2b-e2cf-412a-8e38-ceb15ba12637-kube-api-access-gt6wn\") pod \"authentication-operator-69f744f599-65qt8\" (UID: \"f1adaa2b-e2cf-412a-8e38-ceb15ba12637\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.862608 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5mdq\" (UniqueName: \"kubernetes.io/projected/68ad885b-9cc5-4361-877d-bdf7e1934c7e-kube-api-access-l5mdq\") pod \"cluster-samples-operator-665b6dd947-mb78g\" (UID: \"68ad885b-9cc5-4361-877d-bdf7e1934c7e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mb78g" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.883617 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24sxf\" (UniqueName: \"kubernetes.io/projected/7cfb4e50-5002-4f0d-af8c-5edf8a29d87c-kube-api-access-24sxf\") pod \"apiserver-76f77b778f-dwl5h\" (UID: \"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c\") " pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.896417 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.902835 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.913304 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6djj\" (UniqueName: \"kubernetes.io/projected/b17e4ccd-f166-4933-99ec-ef4a0445ef30-kube-api-access-d6djj\") pod \"console-f9d7485db-x2j5v\" (UID: \"b17e4ccd-f166-4933-99ec-ef4a0445ef30\") " pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.922560 4718 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.944581 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.952760 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.963659 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.971414 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7hkr4"] Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.982406 4718 request.go:700] Waited for 1.900491621s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-canary/secrets?fieldSelector=metadata.name%3Dcanary-serving-cert&limit=500&resourceVersion=0 Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.982951 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.983935 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.992878 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:49 crc kubenswrapper[4718]: I1124 08:37:49.998435 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx"] Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.003664 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.023133 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.024989 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp"] Nov 24 08:37:50 crc kubenswrapper[4718]: W1124 08:37:50.036712 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b3da8f2_2160_4e82_94fa_a44757b4a481.slice/crio-8b5846e382df0a61b476d037a16b34bd01dbe750922b9ef1a649301ac8f94928 WatchSource:0}: Error finding container 8b5846e382df0a61b476d037a16b34bd01dbe750922b9ef1a649301ac8f94928: Status 404 returned error can't find the container with id 8b5846e382df0a61b476d037a16b34bd01dbe750922b9ef1a649301ac8f94928 Nov 24 08:37:50 crc kubenswrapper[4718]: W1124 08:37:50.038393 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb3032dfa_2d47_4afb_870f_244eeace9aa2.slice/crio-5e7194407843c79db74e1c65ff8de7233856e1b14cce5c7ef735d3832f12c1bb WatchSource:0}: Error finding container 5e7194407843c79db74e1c65ff8de7233856e1b14cce5c7ef735d3832f12c1bb: Status 404 returned error can't find the container with id 5e7194407843c79db74e1c65ff8de7233856e1b14cce5c7ef735d3832f12c1bb Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.043108 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.064269 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qq48b"] Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.065956 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.080315 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mb78g" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.082010 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.111593 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.125341 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n55k4\" (UniqueName: \"kubernetes.io/projected/fddc4425-bbc6-4907-a27a-d76661f26708-kube-api-access-n55k4\") pod \"dns-operator-744455d44c-cd9xl\" (UID: \"fddc4425-bbc6-4907-a27a-d76661f26708\") " pod="openshift-dns-operator/dns-operator-744455d44c-cd9xl" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.126417 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.141890 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/454295a5-405c-4698-9fc0-0fda6968cc99-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-vb4dp\" (UID: \"454295a5-405c-4698-9fc0-0fda6968cc99\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vb4dp" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.158231 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xjcpp"] Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.172602 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwrq7\" (UniqueName: \"kubernetes.io/projected/bc24762a-782b-43e4-a603-d3db1f587e02-kube-api-access-pwrq7\") pod \"cluster-image-registry-operator-dc59b4c8b-pz29p\" (UID: \"bc24762a-782b-43e4-a603-d3db1f587e02\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.179090 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-n9vxz"] Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.179755 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5666\" (UniqueName: \"kubernetes.io/projected/021efb9a-dc27-4590-b85f-9d8be1dac72a-kube-api-access-x5666\") pod \"machine-config-controller-84d6567774-tk2jz\" (UID: \"021efb9a-dc27-4590-b85f-9d8be1dac72a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.199450 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmvsd\" (UniqueName: \"kubernetes.io/projected/d8dfe303-3867-4258-854b-c4655768faeb-kube-api-access-bmvsd\") pod 
\"multus-admission-controller-857f4d67dd-fgz8k\" (UID: \"d8dfe303-3867-4258-854b-c4655768faeb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fgz8k" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.221457 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-dwl5h"] Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.238102 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbzgh\" (UniqueName: \"kubernetes.io/projected/15854168-726d-44b5-80e7-d1ca941c2941-kube-api-access-rbzgh\") pod \"downloads-7954f5f757-fw72r\" (UID: \"15854168-726d-44b5-80e7-d1ca941c2941\") " pod="openshift-console/downloads-7954f5f757-fw72r" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.245291 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lfr5\" (UniqueName: \"kubernetes.io/projected/52e5d554-be8b-4312-a58c-67be24e6d340-kube-api-access-8lfr5\") pod \"catalog-operator-68c6474976-jdktz\" (UID: \"52e5d554-be8b-4312-a58c-67be24e6d340\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.265936 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kt5m\" (UniqueName: \"kubernetes.io/projected/a43d8c55-d59d-4c9e-9da5-6b333f0916a8-kube-api-access-4kt5m\") pod \"service-ca-operator-777779d784-ktfkp\" (UID: \"a43d8c55-d59d-4c9e-9da5-6b333f0916a8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.269746 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-cd9xl" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.277302 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmm84\" (UniqueName: \"kubernetes.io/projected/be8de0a9-f2bf-4b42-8117-6c31ee72abeb-kube-api-access-nmm84\") pod \"ingress-operator-5b745b69d9-rphbq\" (UID: \"be8de0a9-f2bf-4b42-8117-6c31ee72abeb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.281127 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" event={"ID":"4b5d227e-1a6b-466b-b380-1e5f7d407e0f","Type":"ContainerStarted","Data":"098bad27ef891dd5613285263c5d09d52df012dc7b0435685c2cefe35a938eb5"} Nov 24 08:37:50 crc kubenswrapper[4718]: W1124 08:37:50.284045 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7cfb4e50_5002_4f0d_af8c_5edf8a29d87c.slice/crio-09f5673ee4a655a80e911489ed5a966dca690f6ca13092db5d925f7ec4bb50c5 WatchSource:0}: Error finding container 09f5673ee4a655a80e911489ed5a966dca690f6ca13092db5d925f7ec4bb50c5: Status 404 returned error can't find the container with id 09f5673ee4a655a80e911489ed5a966dca690f6ca13092db5d925f7ec4bb50c5 Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.297507 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vb4dp" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.302192 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" event={"ID":"7bb84203-369a-468c-9b00-c4a5650b88c8","Type":"ContainerStarted","Data":"93ddd6dfb68003620fc60128ad76fc99396d182e8fa12b00a2eea855913af132"} Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.308880 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.312642 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qq48b" event={"ID":"fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d","Type":"ContainerStarted","Data":"a4e70c7dbf8e7dfdac7f6bd70d5766bcfb3491eb4925cec2fccff0690242e654"} Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.315564 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8slcq\" (UniqueName: \"kubernetes.io/projected/da69e256-e90c-4fde-94f6-d08522e2f3da-kube-api-access-8slcq\") pod \"collect-profiles-29399550-lxjc5\" (UID: \"da69e256-e90c-4fde-94f6-d08522e2f3da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.325604 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-fgz8k" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.326055 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22fwh\" (UniqueName: \"kubernetes.io/projected/f1f4765d-f9d8-4590-99b4-e1e0823424cd-kube-api-access-22fwh\") pod \"control-plane-machine-set-operator-78cbb6b69f-kzq5s\" (UID: \"f1f4765d-f9d8-4590-99b4-e1e0823424cd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kzq5s" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.331850 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" event={"ID":"0b3da8f2-2160-4e82-94fa-a44757b4a481","Type":"ContainerStarted","Data":"8b5846e382df0a61b476d037a16b34bd01dbe750922b9ef1a649301ac8f94928"} Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.334791 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.338708 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" event={"ID":"b3032dfa-2d47-4afb-870f-244eeace9aa2","Type":"ContainerStarted","Data":"5e7194407843c79db74e1c65ff8de7233856e1b14cce5c7ef735d3832f12c1bb"} Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.342145 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" event={"ID":"575d6e01-e969-4c4b-8e9b-20f68bfc57e9","Type":"ContainerStarted","Data":"d8577639fa34fd65a678dfb608413b40c4f94cb6d98987babb213b51eeddbfe3"} Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.342764 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pk8zp\" (UniqueName: \"kubernetes.io/projected/9d95a4c1-33fb-4dcd-83eb-5d4e0c666651-kube-api-access-pk8zp\") pod \"olm-operator-6b444d44fb-bfsfw\" (UID: \"9d95a4c1-33fb-4dcd-83eb-5d4e0c666651\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.366194 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" event={"ID":"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9","Type":"ContainerStarted","Data":"8db2fe92bd6f0412853efebf74269e9cfe34996051bef28b80506052c8535d3a"} Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.374350 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/be8de0a9-f2bf-4b42-8117-6c31ee72abeb-bound-sa-token\") pod \"ingress-operator-5b745b69d9-rphbq\" (UID: \"be8de0a9-f2bf-4b42-8117-6c31ee72abeb\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.376785 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.376903 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.391735 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cdcx\" (UniqueName: \"kubernetes.io/projected/87a7e109-865a-444f-8e06-ba8a6ff6e6e8-kube-api-access-7cdcx\") pod \"etcd-operator-b45778765-hq4j2\" (UID: \"87a7e109-865a-444f-8e06-ba8a6ff6e6e8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.403707 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/98d95fdd-1d35-40d5-bb23-7c605ea5e392-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8rtrx\" (UID: \"98d95fdd-1d35-40d5-bb23-7c605ea5e392\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8rtrx" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.419673 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kzq5s" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.420207 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.424470 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cj5wx\" (UniqueName: \"kubernetes.io/projected/d48d710f-a4a0-402e-b403-58577c79294d-kube-api-access-cj5wx\") pod \"service-ca-9c57cc56f-s27jw\" (UID: \"d48d710f-a4a0-402e-b403-58577c79294d\") " pod="openshift-service-ca/service-ca-9c57cc56f-s27jw" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.438047 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbkgh\" (UniqueName: \"kubernetes.io/projected/8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1-kube-api-access-dbkgh\") pod \"packageserver-d55dfcdfc-pn2ck\" (UID: \"8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.443624 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.443721 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-x2j5v"] Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.466868 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fs6zk\" (UniqueName: \"kubernetes.io/projected/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-kube-api-access-fs6zk\") pod \"route-controller-manager-6576b87f9c-nmrnw\" (UID: \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.489331 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46kgn\" (UniqueName: \"kubernetes.io/projected/33484a8a-7b9c-4faa-901a-666830edd1f1-kube-api-access-46kgn\") pod \"console-operator-58897d9998-sqk2g\" (UID: \"33484a8a-7b9c-4faa-901a-666830edd1f1\") " pod="openshift-console-operator/console-operator-58897d9998-sqk2g" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.503324 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vt2wq\" (UniqueName: \"kubernetes.io/projected/c0272bee-0b74-4b1b-80c4-213d866d6479-kube-api-access-vt2wq\") pod \"migrator-59844c95c7-v4765\" (UID: \"c0272bee-0b74-4b1b-80c4-213d866d6479\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v4765" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.507401 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-fw72r" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.522821 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-sqk2g" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.526823 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w66r9\" (UniqueName: \"kubernetes.io/projected/47cf3417-8f00-44ea-82ca-5d60401f3754-kube-api-access-w66r9\") pod \"router-default-5444994796-jrkxw\" (UID: \"47cf3417-8f00-44ea-82ca-5d60401f3754\") " pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.534433 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" Nov 24 08:37:50 crc kubenswrapper[4718]: W1124 08:37:50.538348 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb17e4ccd_f166_4933_99ec_ef4a0445ef30.slice/crio-bd9bad2f465332f38aece602843fcf2cbcaa0ea3dfc15a96aeed02d422f9b809 WatchSource:0}: Error finding container bd9bad2f465332f38aece602843fcf2cbcaa0ea3dfc15a96aeed02d422f9b809: Status 404 returned error can't find the container with id bd9bad2f465332f38aece602843fcf2cbcaa0ea3dfc15a96aeed02d422f9b809 Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.549548 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.569895 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-494qt\" (UniqueName: \"kubernetes.io/projected/d037ebb5-19ab-471e-b627-3b0487dfa12c-kube-api-access-494qt\") pod \"marketplace-operator-79b997595-z4pwz\" (UID: \"d037ebb5-19ab-471e-b627-3b0487dfa12c\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.570673 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2vzl\" (UniqueName: \"kubernetes.io/projected/8a79c2f3-4280-485b-80ea-239298e165f3-kube-api-access-m2vzl\") pod \"openshift-controller-manager-operator-756b6f6bc6-p6k5z\" (UID: \"8a79c2f3-4280-485b-80ea-239298e165f3\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-p6k5z" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.576304 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mb78g"] Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.582451 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cd9xl"] Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.584627 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bc24762a-782b-43e4-a603-d3db1f587e02-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pz29p\" (UID: \"bc24762a-782b-43e4-a603-d3db1f587e02\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.604329 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ss9zd\" (UniqueName: \"kubernetes.io/projected/ed31fc96-34c7-4136-94fb-e2a0a41e0589-kube-api-access-ss9zd\") pod \"kube-storage-version-migrator-operator-b67b599dd-f6cxd\" (UID: \"ed31fc96-34c7-4136-94fb-e2a0a41e0589\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.618808 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.625894 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cdd4cfba-19fe-4e2d-bf3a-12732fcad83c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cqqjk\" (UID: \"cdd4cfba-19fe-4e2d-bf3a-12732fcad83c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cqqjk" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.637071 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-65qt8"] Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.641422 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bf4m\" (UniqueName: \"kubernetes.io/projected/8db19a4a-1b83-42c6-8c28-82d91a923903-kube-api-access-7bf4m\") pod \"machine-config-operator-74547568cd-lg22l\" (UID: \"8db19a4a-1b83-42c6-8c28-82d91a923903\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.644666 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.651059 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-s27jw" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.657355 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8rtrx" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.680698 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.689788 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.701038 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v4765" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.707482 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-registry-certificates\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.707519 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-bound-sa-token\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.707543 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-registry-tls\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.707559 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-installation-pull-secrets\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.707655 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.707702 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwnp7\" (UniqueName: \"kubernetes.io/projected/9aff7214-a4fe-4aba-a399-f01710ecdc11-kube-api-access-kwnp7\") pod \"package-server-manager-789f6589d5-j77qb\" (UID: \"9aff7214-a4fe-4aba-a399-f01710ecdc11\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j77qb" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.707722 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-trusted-ca\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.707819 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9aff7214-a4fe-4aba-a399-f01710ecdc11-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-j77qb\" (UID: 
\"9aff7214-a4fe-4aba-a399-f01710ecdc11\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j77qb" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.707860 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6j8h\" (UniqueName: \"kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-kube-api-access-v6j8h\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.708068 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-ca-trust-extracted\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: E1124 08:37:50.708300 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:51.208280251 +0000 UTC m=+143.324571275 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.732758 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.801359 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz"] Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.809063 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:50 crc kubenswrapper[4718]: E1124 08:37:50.809337 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:51.309302443 +0000 UTC m=+143.425593357 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.809412 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-installation-pull-secrets\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.809517 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-registry-tls\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.809757 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.809827 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-plugins-dir\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.811390 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-csi-data-dir\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.811476 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwnp7\" (UniqueName: \"kubernetes.io/projected/9aff7214-a4fe-4aba-a399-f01710ecdc11-kube-api-access-kwnp7\") pod \"package-server-manager-789f6589d5-j77qb\" (UID: \"9aff7214-a4fe-4aba-a399-f01710ecdc11\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j77qb" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.811530 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-trusted-ca\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.812295 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d5a5e385-3080-45b3-ace7-f173a10d5a63-cert\") pod \"ingress-canary-vmh47\" (UID: \"d5a5e385-3080-45b3-ace7-f173a10d5a63\") " pod="openshift-ingress-canary/ingress-canary-vmh47" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.812356 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhvd9\" (UniqueName: \"kubernetes.io/projected/d5a5e385-3080-45b3-ace7-f173a10d5a63-kube-api-access-nhvd9\") pod \"ingress-canary-vmh47\" (UID: \"d5a5e385-3080-45b3-ace7-f173a10d5a63\") " pod="openshift-ingress-canary/ingress-canary-vmh47" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.812381 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48r8d\" (UniqueName: \"kubernetes.io/projected/a273ed05-812c-47f4-9b7a-ebc056e9102d-kube-api-access-48r8d\") pod \"dns-default-c6s86\" (UID: \"a273ed05-812c-47f4-9b7a-ebc056e9102d\") " pod="openshift-dns/dns-default-c6s86" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.812630 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/51f60e35-2ec1-4e7c-b000-5896adbcd8fc-certs\") pod \"machine-config-server-k278s\" (UID: \"51f60e35-2ec1-4e7c-b000-5896adbcd8fc\") " pod="openshift-machine-config-operator/machine-config-server-k278s" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.812827 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/51f60e35-2ec1-4e7c-b000-5896adbcd8fc-node-bootstrap-token\") pod \"machine-config-server-k278s\" (UID: \"51f60e35-2ec1-4e7c-b000-5896adbcd8fc\") " pod="openshift-machine-config-operator/machine-config-server-k278s" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.813173 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-registration-dir\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.813636 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnmc9\" (UniqueName: \"kubernetes.io/projected/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-kube-api-access-bnmc9\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.813792 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9aff7214-a4fe-4aba-a399-f01710ecdc11-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-j77qb\" (UID: \"9aff7214-a4fe-4aba-a399-f01710ecdc11\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j77qb" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.813897 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6j8h\" (UniqueName: \"kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-kube-api-access-v6j8h\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: 
\"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.813953 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-socket-dir\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:50 crc kubenswrapper[4718]: E1124 08:37:50.814055 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:51.314037957 +0000 UTC m=+143.430328861 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.814332 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a273ed05-812c-47f4-9b7a-ebc056e9102d-config-volume\") pod \"dns-default-c6s86\" (UID: \"a273ed05-812c-47f4-9b7a-ebc056e9102d\") " pod="openshift-dns/dns-default-c6s86" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.814579 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a273ed05-812c-47f4-9b7a-ebc056e9102d-metrics-tls\") pod \"dns-default-c6s86\" (UID: \"a273ed05-812c-47f4-9b7a-ebc056e9102d\") " pod="openshift-dns/dns-default-c6s86" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.814739 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-ca-trust-extracted\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.814814 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-mountpoint-dir\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.815081 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhkh8\" (UniqueName: \"kubernetes.io/projected/51f60e35-2ec1-4e7c-b000-5896adbcd8fc-kube-api-access-lhkh8\") pod \"machine-config-server-k278s\" (UID: \"51f60e35-2ec1-4e7c-b000-5896adbcd8fc\") " pod="openshift-machine-config-operator/machine-config-server-k278s" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.815177 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-bound-sa-token\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.815270 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-registry-certificates\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.818145 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-trusted-ca\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.820925 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-ca-trust-extracted\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.821396 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-p6k5z" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.831041 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-registry-certificates\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.831243 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.839532 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-registry-tls\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.849621 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9aff7214-a4fe-4aba-a399-f01710ecdc11-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-j77qb\" (UID: \"9aff7214-a4fe-4aba-a399-f01710ecdc11\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j77qb" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.863840 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-installation-pull-secrets\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.866208 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwnp7\" (UniqueName: \"kubernetes.io/projected/9aff7214-a4fe-4aba-a399-f01710ecdc11-kube-api-access-kwnp7\") pod \"package-server-manager-789f6589d5-j77qb\" (UID: \"9aff7214-a4fe-4aba-a399-f01710ecdc11\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j77qb" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.884040 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6j8h\" (UniqueName: \"kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-kube-api-access-v6j8h\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.889603 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cqqjk" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.917835 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.918058 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a273ed05-812c-47f4-9b7a-ebc056e9102d-metrics-tls\") pod \"dns-default-c6s86\" (UID: \"a273ed05-812c-47f4-9b7a-ebc056e9102d\") " pod="openshift-dns/dns-default-c6s86" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.918097 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-mountpoint-dir\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:50 crc kubenswrapper[4718]: E1124 08:37:50.918172 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:51.418145166 +0000 UTC m=+143.534436070 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.918198 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-mountpoint-dir\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.918241 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhkh8\" (UniqueName: \"kubernetes.io/projected/51f60e35-2ec1-4e7c-b000-5896adbcd8fc-kube-api-access-lhkh8\") pod \"machine-config-server-k278s\" (UID: \"51f60e35-2ec1-4e7c-b000-5896adbcd8fc\") " pod="openshift-machine-config-operator/machine-config-server-k278s" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.918298 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.918315 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: 
\"kubernetes.io/host-path/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-plugins-dir\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.918334 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-csi-data-dir\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.918396 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d5a5e385-3080-45b3-ace7-f173a10d5a63-cert\") pod \"ingress-canary-vmh47\" (UID: \"d5a5e385-3080-45b3-ace7-f173a10d5a63\") " pod="openshift-ingress-canary/ingress-canary-vmh47" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.918413 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhvd9\" (UniqueName: \"kubernetes.io/projected/d5a5e385-3080-45b3-ace7-f173a10d5a63-kube-api-access-nhvd9\") pod \"ingress-canary-vmh47\" (UID: \"d5a5e385-3080-45b3-ace7-f173a10d5a63\") " pod="openshift-ingress-canary/ingress-canary-vmh47" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.918430 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48r8d\" (UniqueName: \"kubernetes.io/projected/a273ed05-812c-47f4-9b7a-ebc056e9102d-kube-api-access-48r8d\") pod \"dns-default-c6s86\" (UID: \"a273ed05-812c-47f4-9b7a-ebc056e9102d\") " pod="openshift-dns/dns-default-c6s86" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.918467 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/51f60e35-2ec1-4e7c-b000-5896adbcd8fc-certs\") pod \"machine-config-server-k278s\" (UID: \"51f60e35-2ec1-4e7c-b000-5896adbcd8fc\") " pod="openshift-machine-config-operator/machine-config-server-k278s" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.918494 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/51f60e35-2ec1-4e7c-b000-5896adbcd8fc-node-bootstrap-token\") pod \"machine-config-server-k278s\" (UID: \"51f60e35-2ec1-4e7c-b000-5896adbcd8fc\") " pod="openshift-machine-config-operator/machine-config-server-k278s" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.918520 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-registration-dir\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.918557 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnmc9\" (UniqueName: \"kubernetes.io/projected/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-kube-api-access-bnmc9\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:50 crc kubenswrapper[4718]: E1124 08:37:50.918623 4718 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:51.418610759 +0000 UTC m=+143.534901663 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.918592 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-socket-dir\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.918709 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a273ed05-812c-47f4-9b7a-ebc056e9102d-config-volume\") pod \"dns-default-c6s86\" (UID: \"a273ed05-812c-47f4-9b7a-ebc056e9102d\") " pod="openshift-dns/dns-default-c6s86" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.919014 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-plugins-dir\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.919133 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-csi-data-dir\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.919490 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a273ed05-812c-47f4-9b7a-ebc056e9102d-config-volume\") pod \"dns-default-c6s86\" (UID: \"a273ed05-812c-47f4-9b7a-ebc056e9102d\") " pod="openshift-dns/dns-default-c6s86" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.919565 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-registration-dir\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.919610 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-socket-dir\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.925686 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-bound-sa-token\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.928583 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/51f60e35-2ec1-4e7c-b000-5896adbcd8fc-node-bootstrap-token\") pod \"machine-config-server-k278s\" (UID: \"51f60e35-2ec1-4e7c-b000-5896adbcd8fc\") " pod="openshift-machine-config-operator/machine-config-server-k278s" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.929250 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a273ed05-812c-47f4-9b7a-ebc056e9102d-metrics-tls\") pod \"dns-default-c6s86\" (UID: \"a273ed05-812c-47f4-9b7a-ebc056e9102d\") " pod="openshift-dns/dns-default-c6s86" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.936496 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d5a5e385-3080-45b3-ace7-f173a10d5a63-cert\") pod \"ingress-canary-vmh47\" (UID: \"d5a5e385-3080-45b3-ace7-f173a10d5a63\") " pod="openshift-ingress-canary/ingress-canary-vmh47" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.937669 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/51f60e35-2ec1-4e7c-b000-5896adbcd8fc-certs\") pod \"machine-config-server-k278s\" (UID: \"51f60e35-2ec1-4e7c-b000-5896adbcd8fc\") " pod="openshift-machine-config-operator/machine-config-server-k278s" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.966175 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhvd9\" (UniqueName: \"kubernetes.io/projected/d5a5e385-3080-45b3-ace7-f173a10d5a63-kube-api-access-nhvd9\") pod \"ingress-canary-vmh47\" (UID: \"d5a5e385-3080-45b3-ace7-f173a10d5a63\") " pod="openshift-ingress-canary/ingress-canary-vmh47" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.971926 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5"] Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.979603 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhkh8\" (UniqueName: \"kubernetes.io/projected/51f60e35-2ec1-4e7c-b000-5896adbcd8fc-kube-api-access-lhkh8\") pod \"machine-config-server-k278s\" (UID: \"51f60e35-2ec1-4e7c-b000-5896adbcd8fc\") " pod="openshift-machine-config-operator/machine-config-server-k278s" Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.980828 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vb4dp"] Nov 24 08:37:50 crc kubenswrapper[4718]: I1124 08:37:50.997424 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnmc9\" (UniqueName: \"kubernetes.io/projected/65bbc6c4-09ab-49f3-82fe-f277e38e6dde-kube-api-access-bnmc9\") pod \"csi-hostpathplugin-s8r6x\" (UID: \"65bbc6c4-09ab-49f3-82fe-f277e38e6dde\") " pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.021566 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:51 crc kubenswrapper[4718]: E1124 08:37:51.021757 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:51.52172816 +0000 UTC m=+143.638019064 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.022727 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j77qb" Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.023706 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:51 crc kubenswrapper[4718]: E1124 08:37:51.024273 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:51.524232141 +0000 UTC m=+143.640523085 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.025263 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48r8d\" (UniqueName: \"kubernetes.io/projected/a273ed05-812c-47f4-9b7a-ebc056e9102d-kube-api-access-48r8d\") pod \"dns-default-c6s86\" (UID: \"a273ed05-812c-47f4-9b7a-ebc056e9102d\") " pod="openshift-dns/dns-default-c6s86" Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.051853 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-c6s86" Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.074456 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.083585 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vmh47" Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.092340 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-k278s" Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.112218 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fgz8k"] Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.113063 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw"] Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.117461 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz"] Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.124630 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:51 crc kubenswrapper[4718]: E1124 08:37:51.125134 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:51.625114749 +0000 UTC m=+143.741405653 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.226313 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:51 crc kubenswrapper[4718]: E1124 08:37:51.226752 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:51.726735287 +0000 UTC m=+143.843026181 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.326057 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-fw72r"] Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.326828 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:51 crc kubenswrapper[4718]: E1124 08:37:51.328941 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:51.827273776 +0000 UTC m=+143.943564680 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.330148 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:51 crc kubenswrapper[4718]: E1124 08:37:51.330594 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:51.830572869 +0000 UTC m=+143.946863843 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.347896 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw"] Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.367645 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kzq5s"] Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.380261 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp"] Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.429349 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mb78g" event={"ID":"68ad885b-9cc5-4361-877d-bdf7e1934c7e","Type":"ContainerStarted","Data":"cd3a20cfbb8b1801c0ba6c0dba825171e10025ffdb434dd4f80dad771251c010"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.433356 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:51 crc kubenswrapper[4718]: E1124 08:37:51.434113 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:51.934094381 +0000 UTC m=+144.050385285 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.437715 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" event={"ID":"f1adaa2b-e2cf-412a-8e38-ceb15ba12637","Type":"ContainerStarted","Data":"d37db8dedc7a41e4a03e38afc846257e80a2a01a7b16468432806a51f1f52733"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.442980 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-sqk2g"] Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.460466 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x2j5v" event={"ID":"b17e4ccd-f166-4933-99ec-ef4a0445ef30","Type":"ContainerStarted","Data":"81412c3b87b005990350eb4dfd0ead4ef07896bb43dc309a6606cd79647cf18b"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.460518 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x2j5v" event={"ID":"b17e4ccd-f166-4933-99ec-ef4a0445ef30","Type":"ContainerStarted","Data":"bd9bad2f465332f38aece602843fcf2cbcaa0ea3dfc15a96aeed02d422f9b809"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.466782 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-hq4j2"] Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.474725 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck"] Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.481726 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.481911 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" event={"ID":"7bb84203-369a-468c-9b00-c4a5650b88c8","Type":"ContainerStarted","Data":"52a4928d6c478331a2f1eb3a1fb2c01120a41157e5e232799cf6c463660488f0"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.482004 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qq48b" event={"ID":"fd2c40b2-688b-4e6b-a4dd-bb0d7ad16c2d","Type":"ContainerStarted","Data":"4758dd5483d7cc9a4609462d6df5ce5a1efee551e1d911e33a2a4ddb97aed854"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.482126 4718 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-xjcpp container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" start-of-body= Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.482170 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" podUID="7bb84203-369a-468c-9b00-c4a5650b88c8" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.10:6443/healthz\": 
dial tcp 10.217.0.10:6443: connect: connection refused" Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.488759 4718 generic.go:334] "Generic (PLEG): container finished" podID="b3032dfa-2d47-4afb-870f-244eeace9aa2" containerID="96d0d44ab40a02f3c694dc34376d767e5c2bc7caa52930c0d9cd0e355eda7593" exitCode=0 Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.489163 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" event={"ID":"b3032dfa-2d47-4afb-870f-244eeace9aa2","Type":"ContainerDied","Data":"96d0d44ab40a02f3c694dc34376d767e5c2bc7caa52930c0d9cd0e355eda7593"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.498179 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-cd9xl" event={"ID":"fddc4425-bbc6-4907-a27a-d76661f26708","Type":"ContainerStarted","Data":"36f7f20174504493b922e8f81cadef713a4b9e7b8d00c5343488aed30225a504"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.504014 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" event={"ID":"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9","Type":"ContainerStarted","Data":"03bd91ec750414e9c35275150682858ee2418aaf4bffa1214852e9a225939a71"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.504529 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.506118 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-fgz8k" event={"ID":"d8dfe303-3867-4258-854b-c4655768faeb","Type":"ContainerStarted","Data":"90065c487e069a7adc61b6f9f0c606f81645697291688c8b3d984932c8f8d58d"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.508569 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l"] Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.509131 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz" event={"ID":"021efb9a-dc27-4590-b85f-9d8be1dac72a","Type":"ContainerStarted","Data":"13a12fc1962f998bc2df1226cd14b3f2d8aa424f0d30036d39e3e40be59d9d5c"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.516920 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" event={"ID":"9d95a4c1-33fb-4dcd-83eb-5d4e0c666651","Type":"ContainerStarted","Data":"4ea1162619b58ea15df540beb8c29fcb39316ca2adf22e48700258827f6e357a"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.522075 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cqqjk"] Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.524573 4718 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-n9vxz container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.524704 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" podUID="8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9" 
containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.525227 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" event={"ID":"da69e256-e90c-4fde-94f6-d08522e2f3da","Type":"ContainerStarted","Data":"967cd539aae2777f852c07864b7c1292ee29c9c32c7646910cf970ac7c4384ea"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.530889 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" event={"ID":"575d6e01-e969-4c4b-8e9b-20f68bfc57e9","Type":"ContainerStarted","Data":"1e685ef2119830baeeaa5884ef46e5eb9ea54a382d6082a10cb529b7787657ab"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.535804 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" event={"ID":"4b5d227e-1a6b-466b-b380-1e5f7d407e0f","Type":"ContainerStarted","Data":"30f8fe6cbb2182bd1a5bdbb8a3864c2995c315efc8880fb7a2a7a2d754aacf94"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.535863 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" event={"ID":"4b5d227e-1a6b-466b-b380-1e5f7d407e0f","Type":"ContainerStarted","Data":"ee2b9ba119813efe966cee9e192e0abf386f2808065640e592e4447c64ae5377"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.536144 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:51 crc kubenswrapper[4718]: E1124 08:37:51.538598 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:52.038575661 +0000 UTC m=+144.154866645 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.547005 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-jrkxw" event={"ID":"47cf3417-8f00-44ea-82ca-5d60401f3754","Type":"ContainerStarted","Data":"bbc92f9510c6eebb70ecc2477731c0210e55db748e7a423d02dff58e53921bc3"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.548237 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-k278s" event={"ID":"51f60e35-2ec1-4e7c-b000-5896adbcd8fc","Type":"ContainerStarted","Data":"0a931d86c6591d538f587dbe6320c75c1eb0010059e37ab7e426de4f7f6a2b5c"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.550622 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" event={"ID":"52e5d554-be8b-4312-a58c-67be24e6d340","Type":"ContainerStarted","Data":"877d78dad950d298e3b1efbdaaa998196a69c5b9301812f1da6ed8d60caaee89"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.551773 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vb4dp" event={"ID":"454295a5-405c-4698-9fc0-0fda6968cc99","Type":"ContainerStarted","Data":"85ea568d873d11c60ceca891c7b71146a8a9410925fcc0797b44099ddbcdb1b2"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.556014 4718 generic.go:334] "Generic (PLEG): container finished" podID="7cfb4e50-5002-4f0d-af8c-5edf8a29d87c" containerID="c17cc781827619ac2d9533b18c32b44f6aee068ecd6698c5d511c1b3c2098767" exitCode=0 Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.556098 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" event={"ID":"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c","Type":"ContainerDied","Data":"c17cc781827619ac2d9533b18c32b44f6aee068ecd6698c5d511c1b3c2098767"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.556125 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" event={"ID":"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c","Type":"ContainerStarted","Data":"09f5673ee4a655a80e911489ed5a966dca690f6ca13092db5d925f7ec4bb50c5"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.580253 4718 generic.go:334] "Generic (PLEG): container finished" podID="0b3da8f2-2160-4e82-94fa-a44757b4a481" containerID="907b7db5e77c5e3f9f0c36b10712b837a1b86b13d1575fc899676ef3d4cf7d62" exitCode=0 Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.581414 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" event={"ID":"0b3da8f2-2160-4e82-94fa-a44757b4a481","Type":"ContainerDied","Data":"907b7db5e77c5e3f9f0c36b10712b837a1b86b13d1575fc899676ef3d4cf7d62"} Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.597092 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq"] Nov 24 08:37:51 crc 
kubenswrapper[4718]: I1124 08:37:51.616040 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-s27jw"] Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.637613 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:51 crc kubenswrapper[4718]: E1124 08:37:51.637743 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:52.137713499 +0000 UTC m=+144.254004413 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.638608 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:51 crc kubenswrapper[4718]: E1124 08:37:51.639894 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:52.139877981 +0000 UTC m=+144.256168875 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:51 crc kubenswrapper[4718]: W1124 08:37:51.714745 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8db19a4a_1b83_42c6_8c28_82d91a923903.slice/crio-c3dffb3a86d929aba6a4b048d285ae54675e5671fc97bd4504fbbb7513d099cf WatchSource:0}: Error finding container c3dffb3a86d929aba6a4b048d285ae54675e5671fc97bd4504fbbb7513d099cf: Status 404 returned error can't find the container with id c3dffb3a86d929aba6a4b048d285ae54675e5671fc97bd4504fbbb7513d099cf Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.739871 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:51 crc kubenswrapper[4718]: E1124 08:37:51.740366 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:52.240349497 +0000 UTC m=+144.356640401 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.783989 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-v4765"] Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.805658 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd"] Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.812164 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p"] Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.840958 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:51 crc kubenswrapper[4718]: E1124 08:37:51.841274 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-11-24 08:37:52.341258656 +0000 UTC m=+144.457549560 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:51 crc kubenswrapper[4718]: W1124 08:37:51.907387 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poded31fc96_34c7_4136_94fb_e2a0a41e0589.slice/crio-d4881c1c3b6445043bc3c31d1723c810c961f62d88582e45f860c060dccb95b0 WatchSource:0}: Error finding container d4881c1c3b6445043bc3c31d1723c810c961f62d88582e45f860c060dccb95b0: Status 404 returned error can't find the container with id d4881c1c3b6445043bc3c31d1723c810c961f62d88582e45f860c060dccb95b0 Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.942459 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:51 crc kubenswrapper[4718]: E1124 08:37:51.942955 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:52.442937666 +0000 UTC m=+144.559228570 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:51 crc kubenswrapper[4718]: I1124 08:37:51.961460 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-c6s86"] Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.035778 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z4pwz"] Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.044695 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.044759 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.045320 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:52 crc kubenswrapper[4718]: E1124 08:37:52.045997 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:52.545962834 +0000 UTC m=+144.662253738 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.054291 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-p6k5z"] Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.055689 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8rtrx"] Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.125871 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j77qb"] Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.146609 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:52 crc kubenswrapper[4718]: E1124 08:37:52.147183 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:52.647161431 +0000 UTC m=+144.763452335 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:52 crc kubenswrapper[4718]: W1124 08:37:52.148451 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda273ed05_812c_47f4_9b7a_ebc056e9102d.slice/crio-8615c7d5a1d9a617cfc095fb24d71ed333baa022c4395026717308382f6b397f WatchSource:0}: Error finding container 8615c7d5a1d9a617cfc095fb24d71ed333baa022c4395026717308382f6b397f: Status 404 returned error can't find the container with id 8615c7d5a1d9a617cfc095fb24d71ed333baa022c4395026717308382f6b397f Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.170587 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" podStartSLOduration=124.170564272 podStartE2EDuration="2m4.170564272s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:52.16979411 +0000 UTC m=+144.286085024" watchObservedRunningTime="2025-11-24 08:37:52.170564272 +0000 UTC m=+144.286855176" Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.256572 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:52 crc kubenswrapper[4718]: E1124 08:37:52.256933 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:52.75691711 +0000 UTC m=+144.873208004 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.263182 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-s8r6x"] Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.266015 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vmh47"] Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.267640 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" podStartSLOduration=124.267599811 podStartE2EDuration="2m4.267599811s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:52.261091977 +0000 UTC m=+144.377382871" watchObservedRunningTime="2025-11-24 08:37:52.267599811 +0000 UTC m=+144.383890715" Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.321595 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qq48b" podStartSLOduration=124.321330568 podStartE2EDuration="2m4.321330568s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:52.317637214 +0000 UTC m=+144.433928118" watchObservedRunningTime="2025-11-24 08:37:52.321330568 +0000 UTC m=+144.437621482" Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.358045 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:52 crc kubenswrapper[4718]: E1124 08:37:52.358239 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:52.858208379 +0000 UTC m=+144.974499293 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.358801 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:52 crc kubenswrapper[4718]: E1124 08:37:52.360270 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:52.860258127 +0000 UTC m=+144.976549031 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.421425 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-x2j5v" podStartSLOduration=124.421166716 podStartE2EDuration="2m4.421166716s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:52.418679266 +0000 UTC m=+144.534970180" watchObservedRunningTime="2025-11-24 08:37:52.421166716 +0000 UTC m=+144.537457630" Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.459718 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:52 crc kubenswrapper[4718]: E1124 08:37:52.460366 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:52.960328372 +0000 UTC m=+145.076619276 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.560990 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:52 crc kubenswrapper[4718]: E1124 08:37:52.561308 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:53.061291672 +0000 UTC m=+145.177582656 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.592520 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-fgz8k" event={"ID":"d8dfe303-3867-4258-854b-c4655768faeb","Type":"ContainerStarted","Data":"eb007d554be35f1a79634347853efd6d59b8a6d71557fe22d759cab52f326cb4"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.627643 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" event={"ID":"8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1","Type":"ContainerStarted","Data":"dfee66ede79b325fc4eae542b7388fa28efdb9548b13c493679cc8f9df596107"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.646677 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" event={"ID":"da69e256-e90c-4fde-94f6-d08522e2f3da","Type":"ContainerStarted","Data":"1ad3b6069eaa47e0ae85850445d95a17ecf96f8a0a088f43d22e303dda115dd2"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.664377 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-s27jw" event={"ID":"d48d710f-a4a0-402e-b403-58577c79294d","Type":"ContainerStarted","Data":"e7171789bede4d04cdbf2e8bbd0f29b6362299f1ddb16f12a914ab10dfbdbd72"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.665303 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:52 crc kubenswrapper[4718]: E1124 08:37:52.665662 4718 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:53.165648298 +0000 UTC m=+145.281939192 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.668768 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" event={"ID":"65bbc6c4-09ab-49f3-82fe-f277e38e6dde","Type":"ContainerStarted","Data":"b683fa0dde0dfc55ba5fdcb40e43fa3f7272c117cd610a64ee7b5bd298075b93"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.700588 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-jrkxw" event={"ID":"47cf3417-8f00-44ea-82ca-5d60401f3754","Type":"ContainerStarted","Data":"80e6d32e313947cd3f76b33ad65d9bb4c5c375e875153c7a3c625e2577fbcc81"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.710115 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8rtrx" event={"ID":"98d95fdd-1d35-40d5-bb23-7c605ea5e392","Type":"ContainerStarted","Data":"b2a46b0f37e7335296195556f22b73050fea2bb603143f067fbfa106173d2ea8"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.718516 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-7hkr4" podStartSLOduration=123.7184922 podStartE2EDuration="2m3.7184922s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:52.711852032 +0000 UTC m=+144.828142936" watchObservedRunningTime="2025-11-24 08:37:52.7184922 +0000 UTC m=+144.834783124" Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.728370 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" event={"ID":"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d","Type":"ContainerStarted","Data":"c38e2e87fe4f684eab9d0a97d608f6388e9d6790118723eaabb0adfeea17f6b7"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.728424 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" event={"ID":"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d","Type":"ContainerStarted","Data":"31052415ddcc508782b1d034f07bbdbf1abb453f286df82d905bbe663a15d2c1"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.728836 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.755112 4718 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-nmrnw container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get 
\"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body= Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.755179 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" podUID="e91241fe-8061-4ab3-ac7a-5f3d58e01d5d" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.767952 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:52 crc kubenswrapper[4718]: E1124 08:37:52.769732 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:53.269695465 +0000 UTC m=+145.385986359 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.774182 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p" event={"ID":"bc24762a-782b-43e4-a603-d3db1f587e02","Type":"ContainerStarted","Data":"efa7fa146357a19fd91e43775ffe628c4994289b9113b6d73ab7ff36b93adf65"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.780576 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v4765" event={"ID":"c0272bee-0b74-4b1b-80c4-213d866d6479","Type":"ContainerStarted","Data":"08cd8323ce5bec5f203b69c39160e14859fda4a9bf802efe5b28b376f10fd0da"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.787416 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vmh47" event={"ID":"d5a5e385-3080-45b3-ace7-f173a10d5a63","Type":"ContainerStarted","Data":"eb9f4a9041b6ba35d2264125ce04b03e1ce90992d616a565ff63d7584225c56a"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.805949 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" event={"ID":"52e5d554-be8b-4312-a58c-67be24e6d340","Type":"ContainerStarted","Data":"cbabf7f41a00139ea6e92eb73690315a634c5ee64b260524a4354e4e6a973d9e"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.806846 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.812567 4718 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-jdktz container/catalog-operator 
namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.812635 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" podUID="52e5d554-be8b-4312-a58c-67be24e6d340" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.847488 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" event={"ID":"be8de0a9-f2bf-4b42-8117-6c31ee72abeb","Type":"ContainerStarted","Data":"ba6498e5e76627eda33d7c886cec6911fa06f52d2c220bdc65f44a7e607e1cc4"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.865942 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" event={"ID":"87a7e109-865a-444f-8e06-ba8a6ff6e6e8","Type":"ContainerStarted","Data":"b57c5a445e06c0f90dd13e13cbb4bb36eb2a6d0267f3673567d231c222217f79"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.873875 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:52 crc kubenswrapper[4718]: E1124 08:37:52.875688 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:53.375670487 +0000 UTC m=+145.491961391 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.875953 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" podStartSLOduration=123.875928704 podStartE2EDuration="2m3.875928704s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:52.873497236 +0000 UTC m=+144.989788140" watchObservedRunningTime="2025-11-24 08:37:52.875928704 +0000 UTC m=+144.992219618" Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.889821 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mb78g" event={"ID":"68ad885b-9cc5-4361-877d-bdf7e1934c7e","Type":"ContainerStarted","Data":"c75c99512109daa6eaafc3db7e440a9e8fe59ad15ae20fce777d7ea275bb4e2c"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.896219 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cqqjk" event={"ID":"cdd4cfba-19fe-4e2d-bf3a-12732fcad83c","Type":"ContainerStarted","Data":"4314793a4bd6270acb0456f45fc5e037fd5cb29a9054981443570f7a8e06c512"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.924659 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-jrkxw" podStartSLOduration=124.924640769 podStartE2EDuration="2m4.924640769s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:52.921228153 +0000 UTC m=+145.037519077" watchObservedRunningTime="2025-11-24 08:37:52.924640769 +0000 UTC m=+145.040931673" Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.929157 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-cd9xl" event={"ID":"fddc4425-bbc6-4907-a27a-d76661f26708","Type":"ContainerStarted","Data":"db2d72d5781b76c63f1e4999874c4432392f074c6cd27d82c200d648596a3aa4"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.932170 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz" event={"ID":"021efb9a-dc27-4590-b85f-9d8be1dac72a","Type":"ContainerStarted","Data":"2cb55e152284a957156a7e233431fa020a1da080405dd5cfd696cd62393774d0"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.942593 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-c6s86" event={"ID":"a273ed05-812c-47f4-9b7a-ebc056e9102d","Type":"ContainerStarted","Data":"8615c7d5a1d9a617cfc095fb24d71ed333baa022c4395026717308382f6b397f"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.945806 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-sqk2g" 
event={"ID":"33484a8a-7b9c-4faa-901a-666830edd1f1","Type":"ContainerStarted","Data":"245058521eb4dea3bcfd8116ec84cd1cbd860ef2b55653849c329e55c26941ff"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.945857 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-sqk2g" event={"ID":"33484a8a-7b9c-4faa-901a-666830edd1f1","Type":"ContainerStarted","Data":"b2664b95c526147d118750fb4c2fde8260e51d5cd748cb175845b3613329c2c0"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.945885 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-sqk2g" Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.949293 4718 patch_prober.go:28] interesting pod/console-operator-58897d9998-sqk2g container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.20:8443/readyz\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.949340 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-sqk2g" podUID="33484a8a-7b9c-4faa-901a-666830edd1f1" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.20:8443/readyz\": dial tcp 10.217.0.20:8443: connect: connection refused" Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.980365 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" event={"ID":"9d95a4c1-33fb-4dcd-83eb-5d4e0c666651","Type":"ContainerStarted","Data":"7fc88f85e72c7e0f0256da77133097235bf3122608435e84d3cf8ae072930d6c"} Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.982327 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.982454 4718 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-bfsfw container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.982492 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" podUID="9d95a4c1-33fb-4dcd-83eb-5d4e0c666651" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Nov 24 08:37:52 crc kubenswrapper[4718]: I1124 08:37:52.983157 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:52 crc kubenswrapper[4718]: E1124 08:37:52.984577 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:53.484561941 +0000 UTC m=+145.600852845 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.004038 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" event={"ID":"575d6e01-e969-4c4b-8e9b-20f68bfc57e9","Type":"ContainerStarted","Data":"dfda4024a1daec31a0a467d328d6af8ed5e867ef3232a6ad00c97b600a4419d4"} Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.017255 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" event={"ID":"8db19a4a-1b83-42c6-8c28-82d91a923903","Type":"ContainerStarted","Data":"c3dffb3a86d929aba6a4b048d285ae54675e5671fc97bd4504fbbb7513d099cf"} Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.037791 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j77qb" event={"ID":"9aff7214-a4fe-4aba-a399-f01710ecdc11","Type":"ContainerStarted","Data":"5035027f7d4b099482a01adfe946afe2e790ac8de0f4efd4357835e6572ffae7"} Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.055016 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd" event={"ID":"ed31fc96-34c7-4136-94fb-e2a0a41e0589","Type":"ContainerStarted","Data":"d4881c1c3b6445043bc3c31d1723c810c961f62d88582e45f860c060dccb95b0"} Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.069123 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" event={"ID":"f1adaa2b-e2cf-412a-8e38-ceb15ba12637","Type":"ContainerStarted","Data":"2d99a148512c89c49329b69a7abbfcd58ef44012bbb3f3882f7a0f7341476f4f"} Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.079134 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-k278s" event={"ID":"51f60e35-2ec1-4e7c-b000-5896adbcd8fc","Type":"ContainerStarted","Data":"20dd054a6421a0c9f4f0f75128bf4bbaae4d1046fbd2ab7852b54fb00e91c2d3"} Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.086442 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:53 crc kubenswrapper[4718]: E1124 08:37:53.089167 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:53.589143063 +0000 UTC m=+145.705434057 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.091049 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kzq5s" event={"ID":"f1f4765d-f9d8-4590-99b4-e1e0823424cd","Type":"ContainerStarted","Data":"a7dfb0f9b3d653d5739091dc478fc3c9ceef7c038c7141a7c1ebed7005ed98fa"} Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.124713 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp" event={"ID":"a43d8c55-d59d-4c9e-9da5-6b333f0916a8","Type":"ContainerStarted","Data":"348fae0648ff562844bbab70bf636a5a8a5cb427c11275b1be5f80e1e7c51918"} Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.166351 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" podStartSLOduration=124.166321812 podStartE2EDuration="2m4.166321812s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:53.124035878 +0000 UTC m=+145.240326792" watchObservedRunningTime="2025-11-24 08:37:53.166321812 +0000 UTC m=+145.282612726" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.190804 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:53 crc kubenswrapper[4718]: E1124 08:37:53.192090 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:53.692076539 +0000 UTC m=+145.808367443 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.201464 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" event={"ID":"d037ebb5-19ab-471e-b627-3b0487dfa12c","Type":"ContainerStarted","Data":"1b3868e6a7d206d533d01d2a3bde1aefb24ff9fe482b014eb13c44ede23e8d50"} Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.210185 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-p6k5z" event={"ID":"8a79c2f3-4280-485b-80ea-239298e165f3","Type":"ContainerStarted","Data":"158f65cd41808030518f21eb42f378e16195df9d9b36ce94d62151c7454992ad"} Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.243337 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-fw72r" event={"ID":"15854168-726d-44b5-80e7-d1ca941c2941","Type":"ContainerStarted","Data":"f8e7a53e1eb4ae7fa090697dbe41dff5ba878bbd101ea0209d3e3f2f2dbf4492"} Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.243382 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-fw72r" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.243392 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-fw72r" event={"ID":"15854168-726d-44b5-80e7-d1ca941c2941","Type":"ContainerStarted","Data":"e7cdfa4a4ee01fbf8f326400180509b9a5e0e825a218943538a51432ba257633"} Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.245231 4718 patch_prober.go:28] interesting pod/downloads-7954f5f757-fw72r container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.245275 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-fw72r" podUID="15854168-726d-44b5-80e7-d1ca941c2941" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.246949 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-65qt8" podStartSLOduration=125.246934768 podStartE2EDuration="2m5.246934768s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:53.228884958 +0000 UTC m=+145.345175882" watchObservedRunningTime="2025-11-24 08:37:53.246934768 +0000 UTC m=+145.363225672" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.247859 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" podStartSLOduration=125.247851994 
podStartE2EDuration="2m5.247851994s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:53.247298098 +0000 UTC m=+145.363589002" watchObservedRunningTime="2025-11-24 08:37:53.247851994 +0000 UTC m=+145.364142898" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.268184 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.292938 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:53 crc kubenswrapper[4718]: E1124 08:37:53.293432 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:53.79341257 +0000 UTC m=+145.909703474 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.368303 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kzq5s" podStartSLOduration=124.368282533 podStartE2EDuration="2m4.368282533s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:53.302335092 +0000 UTC m=+145.418625996" watchObservedRunningTime="2025-11-24 08:37:53.368282533 +0000 UTC m=+145.484573437" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.377292 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fc2mt" podStartSLOduration=126.377268247 podStartE2EDuration="2m6.377268247s" podCreationTimestamp="2025-11-24 08:35:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:53.367174022 +0000 UTC m=+145.483464926" watchObservedRunningTime="2025-11-24 08:37:53.377268247 +0000 UTC m=+145.493559161" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.399847 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.404490 4718 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" podStartSLOduration=124.404475095 podStartE2EDuration="2m4.404475095s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:53.40253128 +0000 UTC m=+145.518822204" watchObservedRunningTime="2025-11-24 08:37:53.404475095 +0000 UTC m=+145.520765999" Nov 24 08:37:53 crc kubenswrapper[4718]: E1124 08:37:53.405935 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:53.905919866 +0000 UTC m=+146.022210760 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.488910 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-sqk2g" podStartSLOduration=125.488886018 podStartE2EDuration="2m5.488886018s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:53.437723024 +0000 UTC m=+145.554013928" watchObservedRunningTime="2025-11-24 08:37:53.488886018 +0000 UTC m=+145.605176922" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.490477 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp" podStartSLOduration=124.490449062 podStartE2EDuration="2m4.490449062s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:53.487584111 +0000 UTC m=+145.603875005" watchObservedRunningTime="2025-11-24 08:37:53.490449062 +0000 UTC m=+145.606739966" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.503445 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:53 crc kubenswrapper[4718]: E1124 08:37:53.503859 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:54.00383904 +0000 UTC m=+146.120129954 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.534121 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-cd9xl" podStartSLOduration=125.534102295 podStartE2EDuration="2m5.534102295s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:53.525331997 +0000 UTC m=+145.641622901" watchObservedRunningTime="2025-11-24 08:37:53.534102295 +0000 UTC m=+145.650393199" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.555755 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.555935 4718 patch_prober.go:28] interesting pod/router-default-5444994796-jrkxw container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.555984 4718 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jrkxw" podUID="47cf3417-8f00-44ea-82ca-5d60401f3754" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.566034 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" podStartSLOduration=124.566019216 podStartE2EDuration="2m4.566019216s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:53.564565635 +0000 UTC m=+145.680856539" watchObservedRunningTime="2025-11-24 08:37:53.566019216 +0000 UTC m=+145.682310120" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.607759 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:53 crc kubenswrapper[4718]: E1124 08:37:53.608139 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:54.108127914 +0000 UTC m=+146.224418818 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.648543 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-k278s" podStartSLOduration=5.648523875 podStartE2EDuration="5.648523875s" podCreationTimestamp="2025-11-24 08:37:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:53.610673956 +0000 UTC m=+145.726964870" watchObservedRunningTime="2025-11-24 08:37:53.648523875 +0000 UTC m=+145.764814779" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.702000 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.702128 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-fw72r" podStartSLOduration=125.702113028 podStartE2EDuration="2m5.702113028s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:53.659580357 +0000 UTC m=+145.775871261" watchObservedRunningTime="2025-11-24 08:37:53.702113028 +0000 UTC m=+145.818403942" Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.708678 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:53 crc kubenswrapper[4718]: E1124 08:37:53.709012 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:54.208995552 +0000 UTC m=+146.325286456 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.809599 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:53 crc kubenswrapper[4718]: E1124 08:37:53.810948 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:54.310929899 +0000 UTC m=+146.427220803 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:53 crc kubenswrapper[4718]: I1124 08:37:53.919196 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:53 crc kubenswrapper[4718]: E1124 08:37:53.919659 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:54.419632748 +0000 UTC m=+146.535923652 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.021226 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:54 crc kubenswrapper[4718]: E1124 08:37:54.021783 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:54.521762871 +0000 UTC m=+146.638053845 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.122856 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:54 crc kubenswrapper[4718]: E1124 08:37:54.123325 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:54.623294298 +0000 UTC m=+146.739585232 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.224591 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:54 crc kubenswrapper[4718]: E1124 08:37:54.225040 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:54.725024019 +0000 UTC m=+146.841314923 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.248306 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8rtrx" event={"ID":"98d95fdd-1d35-40d5-bb23-7c605ea5e392","Type":"ContainerStarted","Data":"841f7895e77a449ee67f0bb6d84f2ee792b3446cc762544373cef6061978656e"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.250736 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" event={"ID":"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c","Type":"ContainerStarted","Data":"378c1f717777a69316051a1ed79f8554d3981fd68e48f7cf05a01ae63850b2b6"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.250788 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" event={"ID":"7cfb4e50-5002-4f0d-af8c-5edf8a29d87c","Type":"ContainerStarted","Data":"20e03098993705d10ac1ecec71616132c9c74482cc7b7c8261f311abe5301d22"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.252738 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j77qb" event={"ID":"9aff7214-a4fe-4aba-a399-f01710ecdc11","Type":"ContainerStarted","Data":"538b27a869868573f1228663862456c6d871388ab66e13ff9a5f86771ccaac4a"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.252804 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j77qb" event={"ID":"9aff7214-a4fe-4aba-a399-f01710ecdc11","Type":"ContainerStarted","Data":"aab4286bfc1850dbc782b1cfd90e6462afa256fbeb1970269d47134735c337da"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.252871 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j77qb" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.254127 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cqqjk" event={"ID":"cdd4cfba-19fe-4e2d-bf3a-12732fcad83c","Type":"ContainerStarted","Data":"c9803ed75fa34be6873b42871fa1789c7a132caec820cb0a9b9b5358ee6b7cea"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.255952 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" event={"ID":"b3032dfa-2d47-4afb-870f-244eeace9aa2","Type":"ContainerStarted","Data":"365f46f943e8c4dfdb1dca76a00fb6be68db599fbd67356529800eda8c1040fb"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.257517 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-cd9xl" event={"ID":"fddc4425-bbc6-4907-a27a-d76661f26708","Type":"ContainerStarted","Data":"24fbbef383b1fafd975c57175c188639a4568ae147c3d383c3a2a3ef420c5808"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.258538 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" event={"ID":"8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1","Type":"ContainerStarted","Data":"28e5c500c751da3b400ca5effdf5a1b8fe8b5131060ad962e7d8439a4fcbb550"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.258750 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.259993 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" event={"ID":"be8de0a9-f2bf-4b42-8117-6c31ee72abeb","Type":"ContainerStarted","Data":"ab4434f7efc9016e0ed12176a9824d3ddc3e21f2300176bac12344534a2bc591"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.260020 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" event={"ID":"be8de0a9-f2bf-4b42-8117-6c31ee72abeb","Type":"ContainerStarted","Data":"2fa66200219082b5ac273ce4caa7ed9dd4e9d261303e73441f81b7c0c615aeae"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.260092 4718 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-pn2ck container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" start-of-body= Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.260131 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" podUID="8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.261475 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-hq4j2" event={"ID":"87a7e109-865a-444f-8e06-ba8a6ff6e6e8","Type":"ContainerStarted","Data":"859de0e4ab17d2f757ee7d1787207debd42a3a982e6959d1e076114d5dc1f812"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.263247 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" event={"ID":"0b3da8f2-2160-4e82-94fa-a44757b4a481","Type":"ContainerStarted","Data":"68d34340b2b45a0c6e239fb6f693f6bb7827023fdad49783181199bfeefa8ac4"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.263416 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.264852 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p" event={"ID":"bc24762a-782b-43e4-a603-d3db1f587e02","Type":"ContainerStarted","Data":"60edc8d9dd3541cad9ba0c14adb0f7db4dff07f7336d3fa3e86e47c2bc341fa4"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.266174 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-p6k5z" event={"ID":"8a79c2f3-4280-485b-80ea-239298e165f3","Type":"ContainerStarted","Data":"89fcb02181dba0ac7a1a1e35fa73b9909dfb47822deaa1acc1632d1c4a7e04d0"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.267640 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" event={"ID":"8db19a4a-1b83-42c6-8c28-82d91a923903","Type":"ContainerStarted","Data":"3fd1b3eadd850643b74d37737cd9744e09d88eac84f123dcf336a0195f5381ed"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.267831 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" event={"ID":"8db19a4a-1b83-42c6-8c28-82d91a923903","Type":"ContainerStarted","Data":"732aeb36a54e2c99c6331089f5d0f35f354433929fb16db120581db4f53a69f6"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.268990 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mb78g" event={"ID":"68ad885b-9cc5-4361-877d-bdf7e1934c7e","Type":"ContainerStarted","Data":"2572a04664e97ede909502ea3a81c50c9440721ff44d39f056ce195216fae67d"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.274053 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vmh47" event={"ID":"d5a5e385-3080-45b3-ace7-f173a10d5a63","Type":"ContainerStarted","Data":"2b552d768d0c92ce8f802bc8213497d7d1f96d54e303488bab68cf9bab342884"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.275516 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd" event={"ID":"ed31fc96-34c7-4136-94fb-e2a0a41e0589","Type":"ContainerStarted","Data":"1654877e79faa52a846103031ecef1fcad1b1ecf2ddc4f183524e04418ec734b"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.276860 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-c6s86" event={"ID":"a273ed05-812c-47f4-9b7a-ebc056e9102d","Type":"ContainerStarted","Data":"1d8152c9fd79764e7768e6ed44743a448614858a6f59e4a95fa6998f40f643eb"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.278222 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v4765" 
event={"ID":"c0272bee-0b74-4b1b-80c4-213d866d6479","Type":"ContainerStarted","Data":"d767b1a6953c20fa8077c35d66cea5da53056fa715957a77c1cd9b20684d4f19"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.278256 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v4765" event={"ID":"c0272bee-0b74-4b1b-80c4-213d866d6479","Type":"ContainerStarted","Data":"7976f4dbfe1f80c7e00ba1bc632a563bd0da1c0e7b2a853447c21380bbe36eea"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.278627 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8rtrx" podStartSLOduration=126.278608471 podStartE2EDuration="2m6.278608471s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.275699599 +0000 UTC m=+146.391990513" watchObservedRunningTime="2025-11-24 08:37:54.278608471 +0000 UTC m=+146.394899375" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.280092 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kzq5s" event={"ID":"f1f4765d-f9d8-4590-99b4-e1e0823424cd","Type":"ContainerStarted","Data":"4e2feb71225cc4d5036e3c7d0a05828967f0665c3a0712810ef98940213c3596"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.286897 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-s27jw" event={"ID":"d48d710f-a4a0-402e-b403-58577c79294d","Type":"ContainerStarted","Data":"a61a8c54e403e22964046cc089d0e59673c6337e27105326cb3ae4274fb8a767"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.288703 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-ktfkp" event={"ID":"a43d8c55-d59d-4c9e-9da5-6b333f0916a8","Type":"ContainerStarted","Data":"57f4996b6ed622407ddf0b2815af6fe035cd3bd47085b618772b92e2f1aed4db"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.290603 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vb4dp" event={"ID":"454295a5-405c-4698-9fc0-0fda6968cc99","Type":"ContainerStarted","Data":"6cf5251a6fe38e450c0b58da8d9198bfc9fd7d12a1c418304ff5af635b486e6d"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.292120 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" event={"ID":"d037ebb5-19ab-471e-b627-3b0487dfa12c","Type":"ContainerStarted","Data":"1d44db86d76946d29d4aacddf78eb37ec641c041219ce4a97e55e4621d050999"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.292325 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.302766 4718 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-z4pwz container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.43:8080/healthz\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.302830 4718 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" podUID="d037ebb5-19ab-471e-b627-3b0487dfa12c" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.43:8080/healthz\": dial tcp 10.217.0.43:8080: connect: connection refused" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.306821 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz" event={"ID":"021efb9a-dc27-4590-b85f-9d8be1dac72a","Type":"ContainerStarted","Data":"3588138d587d947cbabadacec9bf584c967f9166a1f01b36b65bee2cd7df4463"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.312356 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" podStartSLOduration=125.312339943 podStartE2EDuration="2m5.312339943s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.310730398 +0000 UTC m=+146.427021302" watchObservedRunningTime="2025-11-24 08:37:54.312339943 +0000 UTC m=+146.428630857" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.312916 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-fgz8k" event={"ID":"d8dfe303-3867-4258-854b-c4655768faeb","Type":"ContainerStarted","Data":"9d88a20e6c9a5d8ff785764a91f251cf789e9a0bfeb1911e2be7b5b90cf2bc25"} Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.314876 4718 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-nmrnw container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body= Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.314915 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" podUID="e91241fe-8061-4ab3-ac7a-5f3d58e01d5d" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.315138 4718 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-jdktz container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.315281 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" podUID="52e5d554-be8b-4312-a58c-67be24e6d340" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.315366 4718 patch_prober.go:28] interesting pod/downloads-7954f5f757-fw72r container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.315396 4718 prober.go:107] "Probe failed" 
probeType="Readiness" pod="openshift-console/downloads-7954f5f757-fw72r" podUID="15854168-726d-44b5-80e7-d1ca941c2941" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.315986 4718 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-bfsfw container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.316016 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" podUID="9d95a4c1-33fb-4dcd-83eb-5d4e0c666651" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.316192 4718 patch_prober.go:28] interesting pod/console-operator-58897d9998-sqk2g container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.20:8443/readyz\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.316218 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-sqk2g" podUID="33484a8a-7b9c-4faa-901a-666830edd1f1" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.20:8443/readyz\": dial tcp 10.217.0.20:8443: connect: connection refused" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.327326 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:54 crc kubenswrapper[4718]: E1124 08:37:54.328497 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:54.828465538 +0000 UTC m=+146.944756442 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.360705 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cqqjk" podStartSLOduration=126.360683148 podStartE2EDuration="2m6.360683148s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.359451793 +0000 UTC m=+146.475742707" watchObservedRunningTime="2025-11-24 08:37:54.360683148 +0000 UTC m=+146.476974052" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.389513 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pz29p" podStartSLOduration=126.389496871 podStartE2EDuration="2m6.389496871s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.389041739 +0000 UTC m=+146.505332643" watchObservedRunningTime="2025-11-24 08:37:54.389496871 +0000 UTC m=+146.505787775" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.429226 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.430652 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-vmh47" podStartSLOduration=7.430639323 podStartE2EDuration="7.430639323s" podCreationTimestamp="2025-11-24 08:37:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.427573426 +0000 UTC m=+146.543864320" watchObservedRunningTime="2025-11-24 08:37:54.430639323 +0000 UTC m=+146.546930227" Nov 24 08:37:54 crc kubenswrapper[4718]: E1124 08:37:54.436858 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:54.936839388 +0000 UTC m=+147.053130362 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.471402 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" podStartSLOduration=125.471377403 podStartE2EDuration="2m5.471377403s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.471045614 +0000 UTC m=+146.587336518" watchObservedRunningTime="2025-11-24 08:37:54.471377403 +0000 UTC m=+146.587668307" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.505799 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j77qb" podStartSLOduration=125.505779714 podStartE2EDuration="2m5.505779714s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.501489683 +0000 UTC m=+146.617780587" watchObservedRunningTime="2025-11-24 08:37:54.505779714 +0000 UTC m=+146.622070618" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.524024 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-p6k5z" podStartSLOduration=126.523995918 podStartE2EDuration="2m6.523995918s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.521440246 +0000 UTC m=+146.637731160" watchObservedRunningTime="2025-11-24 08:37:54.523995918 +0000 UTC m=+146.640286822" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.532023 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:54 crc kubenswrapper[4718]: E1124 08:37:54.532505 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:55.032483138 +0000 UTC m=+147.148774042 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.556508 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" podStartSLOduration=126.556490776 podStartE2EDuration="2m6.556490776s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.55488195 +0000 UTC m=+146.671172854" watchObservedRunningTime="2025-11-24 08:37:54.556490776 +0000 UTC m=+146.672781680" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.568371 4718 patch_prober.go:28] interesting pod/router-default-5444994796-jrkxw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 08:37:54 crc kubenswrapper[4718]: [-]has-synced failed: reason withheld Nov 24 08:37:54 crc kubenswrapper[4718]: [+]process-running ok Nov 24 08:37:54 crc kubenswrapper[4718]: healthz check failed Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.568430 4718 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jrkxw" podUID="47cf3417-8f00-44ea-82ca-5d60401f3754" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.630827 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lg22l" podStartSLOduration=125.630803913 podStartE2EDuration="2m5.630803913s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.610696256 +0000 UTC m=+146.726987160" watchObservedRunningTime="2025-11-24 08:37:54.630803913 +0000 UTC m=+146.747094817" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.633456 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:54 crc kubenswrapper[4718]: E1124 08:37:54.633994 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:55.133982403 +0000 UTC m=+147.250273307 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.664118 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mb78g" podStartSLOduration=126.664078373 podStartE2EDuration="2m6.664078373s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.631450902 +0000 UTC m=+146.747741806" watchObservedRunningTime="2025-11-24 08:37:54.664078373 +0000 UTC m=+146.780369277" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.710955 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rphbq" podStartSLOduration=126.710934526 podStartE2EDuration="2m6.710934526s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.661245273 +0000 UTC m=+146.777536177" watchObservedRunningTime="2025-11-24 08:37:54.710934526 +0000 UTC m=+146.827225430" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.711518 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" podStartSLOduration=126.711514682 podStartE2EDuration="2m6.711514682s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.710704529 +0000 UTC m=+146.826995443" watchObservedRunningTime="2025-11-24 08:37:54.711514682 +0000 UTC m=+146.827805586" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.734272 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.734336 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.734791 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:54 crc kubenswrapper[4718]: E1124 08:37:54.735002 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:55.234986245 +0000 UTC m=+147.351277149 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.735061 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.735117 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-f6cxd" podStartSLOduration=125.735103138 podStartE2EDuration="2m5.735103138s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.732131704 +0000 UTC m=+146.848422608" watchObservedRunningTime="2025-11-24 08:37:54.735103138 +0000 UTC m=+146.851394042" Nov 24 08:37:54 crc kubenswrapper[4718]: E1124 08:37:54.735729 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:55.235694575 +0000 UTC m=+147.351985479 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.739135 4718 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-jmjcp container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.739174 4718 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" podUID="b3032dfa-2d47-4afb-870f-244eeace9aa2" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.788100 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vb4dp" podStartSLOduration=126.788080223 podStartE2EDuration="2m6.788080223s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.785590133 +0000 UTC m=+146.901881037" watchObservedRunningTime="2025-11-24 08:37:54.788080223 +0000 UTC m=+146.904371127" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.788938 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" podStartSLOduration=125.788929397 podStartE2EDuration="2m5.788929397s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.753158418 +0000 UTC m=+146.869449322" watchObservedRunningTime="2025-11-24 08:37:54.788929397 +0000 UTC m=+146.905220301" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.816104 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tk2jz" podStartSLOduration=125.816088444 podStartE2EDuration="2m5.816088444s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.81487662 +0000 UTC m=+146.931167524" watchObservedRunningTime="2025-11-24 08:37:54.816088444 +0000 UTC m=+146.932379348" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.831741 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-fgz8k" podStartSLOduration=125.831710255 podStartE2EDuration="2m5.831710255s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.829699488 +0000 UTC m=+146.945990392" watchObservedRunningTime="2025-11-24 
08:37:54.831710255 +0000 UTC m=+146.948001159" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.836674 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:54 crc kubenswrapper[4718]: E1124 08:37:54.837133 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:55.337084267 +0000 UTC m=+147.453375171 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.879461 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-s27jw" podStartSLOduration=125.879442163 podStartE2EDuration="2m5.879442163s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.878595359 +0000 UTC m=+146.994886273" watchObservedRunningTime="2025-11-24 08:37:54.879442163 +0000 UTC m=+146.995733077" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.880746 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v4765" podStartSLOduration=125.880727039 podStartE2EDuration="2m5.880727039s" podCreationTimestamp="2025-11-24 08:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:54.858604784 +0000 UTC m=+146.974895698" watchObservedRunningTime="2025-11-24 08:37:54.880727039 +0000 UTC m=+146.997027843" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.938025 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:54 crc kubenswrapper[4718]: E1124 08:37:54.938465 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:55.438445348 +0000 UTC m=+147.554736332 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.993849 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.994150 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.995130 4718 patch_prober.go:28] interesting pod/apiserver-76f77b778f-dwl5h container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.6:8443/livez\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Nov 24 08:37:54 crc kubenswrapper[4718]: I1124 08:37:54.995167 4718 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" podUID="7cfb4e50-5002-4f0d-af8c-5edf8a29d87c" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.6:8443/livez\": dial tcp 10.217.0.6:8443: connect: connection refused" Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.038756 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:55 crc kubenswrapper[4718]: E1124 08:37:55.039231 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:55.539214363 +0000 UTC m=+147.655505267 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.139996 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:55 crc kubenswrapper[4718]: E1124 08:37:55.140511 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-11-24 08:37:55.640495992 +0000 UTC m=+147.756786896 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.240858 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:55 crc kubenswrapper[4718]: E1124 08:37:55.241127 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:55.741095972 +0000 UTC m=+147.857386876 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.241347 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:55 crc kubenswrapper[4718]: E1124 08:37:55.241715 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:55.741702219 +0000 UTC m=+147.857993123 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.320147 4718 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-bfsfw container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.320194 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" podUID="9d95a4c1-33fb-4dcd-83eb-5d4e0c666651" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.320882 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-c6s86" event={"ID":"a273ed05-812c-47f4-9b7a-ebc056e9102d","Type":"ContainerStarted","Data":"4822211d98075055426b58e66e1aa1c8c30a7a1707c15769e316e705a42980e7"} Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.322222 4718 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-z4pwz container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.43:8080/healthz\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.322264 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" podUID="d037ebb5-19ab-471e-b627-3b0487dfa12c" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.43:8080/healthz\": dial tcp 10.217.0.43:8080: connect: connection refused" Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.322592 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-c6s86" Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.322936 4718 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-pn2ck container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" start-of-body= Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.322992 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" podUID="8d7ad9f1-60a6-4d29-9f8b-62c147d9b0d1" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.324429 4718 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-sk6hx container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" 
start-of-body= Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.324461 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" podUID="0b3da8f2-2160-4e82-94fa-a44757b4a481" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.342319 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:55 crc kubenswrapper[4718]: E1124 08:37:55.342709 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:55.842693 +0000 UTC m=+147.958983904 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.369205 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jdktz" Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.405876 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-c6s86" podStartSLOduration=8.405853863 podStartE2EDuration="8.405853863s" podCreationTimestamp="2025-11-24 08:37:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:55.365819593 +0000 UTC m=+147.482110517" watchObservedRunningTime="2025-11-24 08:37:55.405853863 +0000 UTC m=+147.522144767" Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.444835 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:55 crc kubenswrapper[4718]: E1124 08:37:55.446425 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:55.946407698 +0000 UTC m=+148.062698682 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.546458 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:55 crc kubenswrapper[4718]: E1124 08:37:55.546675 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.046595756 +0000 UTC m=+148.162886660 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.546774 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:55 crc kubenswrapper[4718]: E1124 08:37:55.547138 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.047129581 +0000 UTC m=+148.163420485 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.554688 4718 patch_prober.go:28] interesting pod/router-default-5444994796-jrkxw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 08:37:55 crc kubenswrapper[4718]: [-]has-synced failed: reason withheld Nov 24 08:37:55 crc kubenswrapper[4718]: [+]process-running ok Nov 24 08:37:55 crc kubenswrapper[4718]: healthz check failed Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.554747 4718 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jrkxw" podUID="47cf3417-8f00-44ea-82ca-5d60401f3754" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.648543 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:55 crc kubenswrapper[4718]: E1124 08:37:55.648767 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.14873501 +0000 UTC m=+148.265025924 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.649332 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:55 crc kubenswrapper[4718]: E1124 08:37:55.649727 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.149713267 +0000 UTC m=+148.266004361 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.751158 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:55 crc kubenswrapper[4718]: E1124 08:37:55.751311 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.251288775 +0000 UTC m=+148.367579699 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.751522 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:55 crc kubenswrapper[4718]: E1124 08:37:55.751850 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.25183649 +0000 UTC m=+148.368127404 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.769415 4718 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-sk6hx container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.769482 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" podUID="0b3da8f2-2160-4e82-94fa-a44757b4a481" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.769521 4718 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-sk6hx container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.769552 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" podUID="0b3da8f2-2160-4e82-94fa-a44757b4a481" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.852184 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:55 crc kubenswrapper[4718]: E1124 08:37:55.852291 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.352273066 +0000 UTC m=+148.468563970 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.852598 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:55 crc kubenswrapper[4718]: E1124 08:37:55.853050 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.353031587 +0000 UTC m=+148.469322591 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.953614 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:55 crc kubenswrapper[4718]: E1124 08:37:55.953808 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.453777581 +0000 UTC m=+148.570068495 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:55 crc kubenswrapper[4718]: I1124 08:37:55.953942 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:55 crc kubenswrapper[4718]: E1124 08:37:55.954345 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.454333177 +0000 UTC m=+148.570624161 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.055310 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.055508 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.555483702 +0000 UTC m=+148.671774606 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.055691 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.056048 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.556038938 +0000 UTC m=+148.672329842 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.156366 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.156578 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.656546365 +0000 UTC m=+148.772837259 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.156941 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.157308 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.657296496 +0000 UTC m=+148.773587400 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.257540 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.257752 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.757721391 +0000 UTC m=+148.874012295 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.258003 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.258319 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.758307138 +0000 UTC m=+148.874598032 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.325606 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" event={"ID":"65bbc6c4-09ab-49f3-82fe-f277e38e6dde","Type":"ContainerStarted","Data":"198feef5455415ab9114b9e6cb2f7f805723f5374a78bbb2de79cb8eaef6bbcf"} Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.359304 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.359401 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.859385121 +0000 UTC m=+148.975676025 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.359655 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.359921 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.859914016 +0000 UTC m=+148.976204920 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.461319 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.461589 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.961558226 +0000 UTC m=+149.077849130 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.461738 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.462024 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.462065 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.462164 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.463402 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:56.963390757 +0000 UTC m=+149.079681661 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.463617 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.468409 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.481565 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.553397 4718 patch_prober.go:28] interesting pod/router-default-5444994796-jrkxw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 08:37:56 crc kubenswrapper[4718]: [-]has-synced failed: reason withheld Nov 24 08:37:56 crc kubenswrapper[4718]: [+]process-running ok Nov 24 08:37:56 crc kubenswrapper[4718]: healthz check failed Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.553481 4718 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jrkxw" podUID="47cf3417-8f00-44ea-82ca-5d60401f3754" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.563072 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.563270 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:57.063229856 +0000 UTC m=+149.179520760 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.563408 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.563506 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.563912 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:57.063892025 +0000 UTC m=+149.180182929 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.578914 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.610777 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.626786 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.638216 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.669503 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.669713 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:57.169696381 +0000 UTC m=+149.285987275 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.669889 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.670199 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:57.170188765 +0000 UTC m=+149.286479669 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.771039 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.771244 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:57.271192177 +0000 UTC m=+149.387483091 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.771556 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.771911 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:57.271900277 +0000 UTC m=+149.388191181 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.875577 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.875777 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:57.375747768 +0000 UTC m=+149.492038692 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.875898 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.876293 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:57.376278453 +0000 UTC m=+149.492569377 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.897120 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qcdbm"] Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.898140 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qcdbm" Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.908579 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.919145 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qcdbm"] Nov 24 08:37:56 crc kubenswrapper[4718]: I1124 08:37:56.978127 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:56 crc kubenswrapper[4718]: E1124 08:37:56.978459 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:57.478439627 +0000 UTC m=+149.594730531 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.075608 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9zgmp"] Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.076778 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9zgmp" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.081545 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.081748 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-utilities\") pod \"certified-operators-qcdbm\" (UID: \"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16\") " pod="openshift-marketplace/certified-operators-qcdbm" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.082139 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-catalog-content\") pod \"certified-operators-qcdbm\" (UID: \"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16\") " pod="openshift-marketplace/certified-operators-qcdbm" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.082232 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnll9\" (UniqueName: \"kubernetes.io/projected/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-kube-api-access-nnll9\") pod \"certified-operators-qcdbm\" (UID: \"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16\") " pod="openshift-marketplace/certified-operators-qcdbm" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.082274 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:57 crc kubenswrapper[4718]: E1124 08:37:57.082648 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:57.582635779 +0000 UTC m=+149.698926683 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.098088 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9zgmp"] Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.184685 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.184919 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-utilities\") pod \"certified-operators-qcdbm\" (UID: \"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16\") " pod="openshift-marketplace/certified-operators-qcdbm" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.184941 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-catalog-content\") pod \"certified-operators-qcdbm\" (UID: \"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16\") " pod="openshift-marketplace/certified-operators-qcdbm" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.185003 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnll9\" (UniqueName: \"kubernetes.io/projected/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-kube-api-access-nnll9\") pod \"certified-operators-qcdbm\" (UID: \"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16\") " pod="openshift-marketplace/certified-operators-qcdbm" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.185047 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44264beb-2b14-47a9-9da4-18ca5e19d282-utilities\") pod \"community-operators-9zgmp\" (UID: \"44264beb-2b14-47a9-9da4-18ca5e19d282\") " pod="openshift-marketplace/community-operators-9zgmp" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.185083 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44264beb-2b14-47a9-9da4-18ca5e19d282-catalog-content\") pod \"community-operators-9zgmp\" (UID: \"44264beb-2b14-47a9-9da4-18ca5e19d282\") " pod="openshift-marketplace/community-operators-9zgmp" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.185147 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwl8p\" (UniqueName: \"kubernetes.io/projected/44264beb-2b14-47a9-9da4-18ca5e19d282-kube-api-access-lwl8p\") pod \"community-operators-9zgmp\" (UID: \"44264beb-2b14-47a9-9da4-18ca5e19d282\") " pod="openshift-marketplace/community-operators-9zgmp" Nov 24 08:37:57 crc kubenswrapper[4718]: E1124 08:37:57.185271 4718 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:57.685254116 +0000 UTC m=+149.801545030 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.186161 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-utilities\") pod \"certified-operators-qcdbm\" (UID: \"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16\") " pod="openshift-marketplace/certified-operators-qcdbm" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.186529 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-catalog-content\") pod \"certified-operators-qcdbm\" (UID: \"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16\") " pod="openshift-marketplace/certified-operators-qcdbm" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.226244 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnll9\" (UniqueName: \"kubernetes.io/projected/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-kube-api-access-nnll9\") pod \"certified-operators-qcdbm\" (UID: \"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16\") " pod="openshift-marketplace/certified-operators-qcdbm" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.286359 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwl8p\" (UniqueName: \"kubernetes.io/projected/44264beb-2b14-47a9-9da4-18ca5e19d282-kube-api-access-lwl8p\") pod \"community-operators-9zgmp\" (UID: \"44264beb-2b14-47a9-9da4-18ca5e19d282\") " pod="openshift-marketplace/community-operators-9zgmp" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.286500 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.286550 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44264beb-2b14-47a9-9da4-18ca5e19d282-utilities\") pod \"community-operators-9zgmp\" (UID: \"44264beb-2b14-47a9-9da4-18ca5e19d282\") " pod="openshift-marketplace/community-operators-9zgmp" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.286573 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44264beb-2b14-47a9-9da4-18ca5e19d282-catalog-content\") pod \"community-operators-9zgmp\" (UID: \"44264beb-2b14-47a9-9da4-18ca5e19d282\") " 
pod="openshift-marketplace/community-operators-9zgmp" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.287284 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44264beb-2b14-47a9-9da4-18ca5e19d282-catalog-content\") pod \"community-operators-9zgmp\" (UID: \"44264beb-2b14-47a9-9da4-18ca5e19d282\") " pod="openshift-marketplace/community-operators-9zgmp" Nov 24 08:37:57 crc kubenswrapper[4718]: E1124 08:37:57.287952 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:57.787921964 +0000 UTC m=+149.904212868 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.288155 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44264beb-2b14-47a9-9da4-18ca5e19d282-utilities\") pod \"community-operators-9zgmp\" (UID: \"44264beb-2b14-47a9-9da4-18ca5e19d282\") " pod="openshift-marketplace/community-operators-9zgmp" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.300288 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hg8cf"] Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.301411 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hg8cf" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.326275 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hg8cf"] Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.336283 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwl8p\" (UniqueName: \"kubernetes.io/projected/44264beb-2b14-47a9-9da4-18ca5e19d282-kube-api-access-lwl8p\") pod \"community-operators-9zgmp\" (UID: \"44264beb-2b14-47a9-9da4-18ca5e19d282\") " pod="openshift-marketplace/community-operators-9zgmp" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.388762 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.388922 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-utilities\") pod \"certified-operators-hg8cf\" (UID: \"e4eeb5aa-31cb-4c3e-8045-8132ac10b348\") " pod="openshift-marketplace/certified-operators-hg8cf" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.388960 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-catalog-content\") pod \"certified-operators-hg8cf\" (UID: \"e4eeb5aa-31cb-4c3e-8045-8132ac10b348\") " pod="openshift-marketplace/certified-operators-hg8cf" Nov 24 08:37:57 crc kubenswrapper[4718]: E1124 08:37:57.389075 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:57.889059089 +0000 UTC m=+150.005349993 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.389388 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.389415 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8b9b\" (UniqueName: \"kubernetes.io/projected/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-kube-api-access-v8b9b\") pod \"certified-operators-hg8cf\" (UID: \"e4eeb5aa-31cb-4c3e-8045-8132ac10b348\") " pod="openshift-marketplace/certified-operators-hg8cf" Nov 24 08:37:57 crc kubenswrapper[4718]: E1124 08:37:57.389681 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:57.889673676 +0000 UTC m=+150.005964580 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.390489 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qcdbm" Nov 24 08:37:57 crc kubenswrapper[4718]: W1124 08:37:57.397520 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-8cce6aaeca8638c751dbd22c2218b9a4e44d9736b394efe2108e336328689a7e WatchSource:0}: Error finding container 8cce6aaeca8638c751dbd22c2218b9a4e44d9736b394efe2108e336328689a7e: Status 404 returned error can't find the container with id 8cce6aaeca8638c751dbd22c2218b9a4e44d9736b394efe2108e336328689a7e Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.424059 4718 generic.go:334] "Generic (PLEG): container finished" podID="da69e256-e90c-4fde-94f6-d08522e2f3da" containerID="1ad3b6069eaa47e0ae85850445d95a17ecf96f8a0a088f43d22e303dda115dd2" exitCode=0 Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.424142 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" event={"ID":"da69e256-e90c-4fde-94f6-d08522e2f3da","Type":"ContainerDied","Data":"1ad3b6069eaa47e0ae85850445d95a17ecf96f8a0a088f43d22e303dda115dd2"} Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.433258 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9zgmp" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.504737 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.505116 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8b9b\" (UniqueName: \"kubernetes.io/projected/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-kube-api-access-v8b9b\") pod \"certified-operators-hg8cf\" (UID: \"e4eeb5aa-31cb-4c3e-8045-8132ac10b348\") " pod="openshift-marketplace/certified-operators-hg8cf" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.505176 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-utilities\") pod \"certified-operators-hg8cf\" (UID: \"e4eeb5aa-31cb-4c3e-8045-8132ac10b348\") " pod="openshift-marketplace/certified-operators-hg8cf" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.505216 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-catalog-content\") pod \"certified-operators-hg8cf\" (UID: \"e4eeb5aa-31cb-4c3e-8045-8132ac10b348\") " pod="openshift-marketplace/certified-operators-hg8cf" Nov 24 08:37:57 crc kubenswrapper[4718]: E1124 08:37:57.508414 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:58.008385078 +0000 UTC m=+150.124675982 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.520981 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-catalog-content\") pod \"certified-operators-hg8cf\" (UID: \"e4eeb5aa-31cb-4c3e-8045-8132ac10b348\") " pod="openshift-marketplace/certified-operators-hg8cf" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.521118 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-utilities\") pod \"certified-operators-hg8cf\" (UID: \"e4eeb5aa-31cb-4c3e-8045-8132ac10b348\") " pod="openshift-marketplace/certified-operators-hg8cf" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.528023 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gl4gc"] Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.529078 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gl4gc" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.549294 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gl4gc"] Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.556299 4718 patch_prober.go:28] interesting pod/router-default-5444994796-jrkxw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 08:37:57 crc kubenswrapper[4718]: [-]has-synced failed: reason withheld Nov 24 08:37:57 crc kubenswrapper[4718]: [+]process-running ok Nov 24 08:37:57 crc kubenswrapper[4718]: healthz check failed Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.556363 4718 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jrkxw" podUID="47cf3417-8f00-44ea-82ca-5d60401f3754" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.585005 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8b9b\" (UniqueName: \"kubernetes.io/projected/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-kube-api-access-v8b9b\") pod \"certified-operators-hg8cf\" (UID: \"e4eeb5aa-31cb-4c3e-8045-8132ac10b348\") " pod="openshift-marketplace/certified-operators-hg8cf" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.605873 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.605914 4718 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baad1930-cdc6-4c31-b707-b3f35554f711-catalog-content\") pod \"community-operators-gl4gc\" (UID: \"baad1930-cdc6-4c31-b707-b3f35554f711\") " pod="openshift-marketplace/community-operators-gl4gc" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.606031 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baad1930-cdc6-4c31-b707-b3f35554f711-utilities\") pod \"community-operators-gl4gc\" (UID: \"baad1930-cdc6-4c31-b707-b3f35554f711\") " pod="openshift-marketplace/community-operators-gl4gc" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.606096 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvjnt\" (UniqueName: \"kubernetes.io/projected/baad1930-cdc6-4c31-b707-b3f35554f711-kube-api-access-wvjnt\") pod \"community-operators-gl4gc\" (UID: \"baad1930-cdc6-4c31-b707-b3f35554f711\") " pod="openshift-marketplace/community-operators-gl4gc" Nov 24 08:37:57 crc kubenswrapper[4718]: E1124 08:37:57.606308 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:58.106293392 +0000 UTC m=+150.222584296 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.662807 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hg8cf" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.714029 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:57 crc kubenswrapper[4718]: E1124 08:37:57.714267 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:58.214246289 +0000 UTC m=+150.330537193 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.714479 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.714503 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baad1930-cdc6-4c31-b707-b3f35554f711-catalog-content\") pod \"community-operators-gl4gc\" (UID: \"baad1930-cdc6-4c31-b707-b3f35554f711\") " pod="openshift-marketplace/community-operators-gl4gc" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.714560 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baad1930-cdc6-4c31-b707-b3f35554f711-utilities\") pod \"community-operators-gl4gc\" (UID: \"baad1930-cdc6-4c31-b707-b3f35554f711\") " pod="openshift-marketplace/community-operators-gl4gc" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.714578 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvjnt\" (UniqueName: \"kubernetes.io/projected/baad1930-cdc6-4c31-b707-b3f35554f711-kube-api-access-wvjnt\") pod \"community-operators-gl4gc\" (UID: \"baad1930-cdc6-4c31-b707-b3f35554f711\") " pod="openshift-marketplace/community-operators-gl4gc" Nov 24 08:37:57 crc kubenswrapper[4718]: E1124 08:37:57.714992 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:58.214966939 +0000 UTC m=+150.331257843 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.715332 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baad1930-cdc6-4c31-b707-b3f35554f711-catalog-content\") pod \"community-operators-gl4gc\" (UID: \"baad1930-cdc6-4c31-b707-b3f35554f711\") " pod="openshift-marketplace/community-operators-gl4gc" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.715543 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baad1930-cdc6-4c31-b707-b3f35554f711-utilities\") pod \"community-operators-gl4gc\" (UID: \"baad1930-cdc6-4c31-b707-b3f35554f711\") " pod="openshift-marketplace/community-operators-gl4gc" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.749444 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvjnt\" (UniqueName: \"kubernetes.io/projected/baad1930-cdc6-4c31-b707-b3f35554f711-kube-api-access-wvjnt\") pod \"community-operators-gl4gc\" (UID: \"baad1930-cdc6-4c31-b707-b3f35554f711\") " pod="openshift-marketplace/community-operators-gl4gc" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.817507 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:57 crc kubenswrapper[4718]: E1124 08:37:57.817815 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:58.317785962 +0000 UTC m=+150.434076866 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.818235 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:57 crc kubenswrapper[4718]: E1124 08:37:57.818552 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:58.318530943 +0000 UTC m=+150.434821847 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.858635 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gl4gc" Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.919862 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:57 crc kubenswrapper[4718]: E1124 08:37:57.920512 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:58.420498071 +0000 UTC m=+150.536788975 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:57 crc kubenswrapper[4718]: I1124 08:37:57.988488 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9zgmp"] Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.021444 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:58 crc kubenswrapper[4718]: E1124 08:37:58.021827 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:58.521814401 +0000 UTC m=+150.638105305 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.071401 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qcdbm"] Nov 24 08:37:58 crc kubenswrapper[4718]: W1124 08:37:58.094665 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0cbd20a0_57cf_428b_b9c6_2ffb52ec5e16.slice/crio-0a0e4c088cb8cf10292ce97ee87f3788833781459507b28b8f66775c268b8758 WatchSource:0}: Error finding container 0a0e4c088cb8cf10292ce97ee87f3788833781459507b28b8f66775c268b8758: Status 404 returned error can't find the container with id 0a0e4c088cb8cf10292ce97ee87f3788833781459507b28b8f66775c268b8758 Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.122232 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:58 crc kubenswrapper[4718]: E1124 08:37:58.122695 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:58.622675948 +0000 UTC m=+150.738966852 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.149241 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hg8cf"] Nov 24 08:37:58 crc kubenswrapper[4718]: W1124 08:37:58.173307 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4eeb5aa_31cb_4c3e_8045_8132ac10b348.slice/crio-b35c90010c5540aa38fd4d8b03f64fa133e2053da097596968d95f94afb89848 WatchSource:0}: Error finding container b35c90010c5540aa38fd4d8b03f64fa133e2053da097596968d95f94afb89848: Status 404 returned error can't find the container with id b35c90010c5540aa38fd4d8b03f64fa133e2053da097596968d95f94afb89848 Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.176222 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.176833 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.180542 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.180777 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.184275 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.235438 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:58 crc kubenswrapper[4718]: E1124 08:37:58.235840 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:58.735826862 +0000 UTC m=+150.852117766 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.336916 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.337306 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2881abce-fb45-42c4-895f-aaec41e8c4cc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2881abce-fb45-42c4-895f-aaec41e8c4cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 08:37:58 crc kubenswrapper[4718]: E1124 08:37:58.337379 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:58.837327098 +0000 UTC m=+150.953618002 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.337467 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.337641 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2881abce-fb45-42c4-895f-aaec41e8c4cc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2881abce-fb45-42c4-895f-aaec41e8c4cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 08:37:58 crc kubenswrapper[4718]: E1124 08:37:58.337761 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:58.83774102 +0000 UTC m=+150.954031924 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.427934 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gl4gc"] Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.441635 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.441784 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2881abce-fb45-42c4-895f-aaec41e8c4cc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2881abce-fb45-42c4-895f-aaec41e8c4cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.441837 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2881abce-fb45-42c4-895f-aaec41e8c4cc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2881abce-fb45-42c4-895f-aaec41e8c4cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.442170 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2881abce-fb45-42c4-895f-aaec41e8c4cc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2881abce-fb45-42c4-895f-aaec41e8c4cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 08:37:58 crc kubenswrapper[4718]: E1124 08:37:58.442212 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:58.942187658 +0000 UTC m=+151.058478562 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.445427 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" event={"ID":"65bbc6c4-09ab-49f3-82fe-f277e38e6dde","Type":"ContainerStarted","Data":"0be26306b4788541678a347731207c45e809d9ac76b6607cb70e7d503d02f282"} Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.445486 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" event={"ID":"65bbc6c4-09ab-49f3-82fe-f277e38e6dde","Type":"ContainerStarted","Data":"b4ff76c82e7b1650e4624a8a63f9e447836788b1e42c68f6d909b54efd3fbfce"} Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.447999 4718 generic.go:334] "Generic (PLEG): container finished" podID="44264beb-2b14-47a9-9da4-18ca5e19d282" containerID="70ed657792d4e97b28b5606e313fb58e64d38e6cc896aa48dfd46efc9c5978df" exitCode=0 Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.448072 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9zgmp" event={"ID":"44264beb-2b14-47a9-9da4-18ca5e19d282","Type":"ContainerDied","Data":"70ed657792d4e97b28b5606e313fb58e64d38e6cc896aa48dfd46efc9c5978df"} Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.448101 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9zgmp" event={"ID":"44264beb-2b14-47a9-9da4-18ca5e19d282","Type":"ContainerStarted","Data":"58d26c92cdf7f82553653a6a4eb1fe0959db34d53913e8a0fab0b98c3c852557"} Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.450256 4718 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.462903 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qcdbm" event={"ID":"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16","Type":"ContainerStarted","Data":"3f73af9d5d4a5ed1e40e305540bca354d03cf203c9d548c4a06d724c38111dd9"} Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.462947 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qcdbm" event={"ID":"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16","Type":"ContainerStarted","Data":"0a0e4c088cb8cf10292ce97ee87f3788833781459507b28b8f66775c268b8758"} Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.472378 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2881abce-fb45-42c4-895f-aaec41e8c4cc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2881abce-fb45-42c4-895f-aaec41e8c4cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.485067 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hg8cf" event={"ID":"e4eeb5aa-31cb-4c3e-8045-8132ac10b348","Type":"ContainerStarted","Data":"a59ee3d3be05b598589c108616fa0961d8ea8da823061c581dc57cda2ef25f46"} Nov 24 08:37:58 crc 
kubenswrapper[4718]: I1124 08:37:58.485114 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hg8cf" event={"ID":"e4eeb5aa-31cb-4c3e-8045-8132ac10b348","Type":"ContainerStarted","Data":"b35c90010c5540aa38fd4d8b03f64fa133e2053da097596968d95f94afb89848"} Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.498594 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"2f4c00724ad50bc70c771df70b9d225cd099a2bf38fe1b55aa5da676245b21d1"} Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.498642 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"8cce6aaeca8638c751dbd22c2218b9a4e44d9736b394efe2108e336328689a7e"} Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.500306 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"609c6f7b50b4c2c3e3a218f260e6a73b5054df457f6de4a992efeed839cf811e"} Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.500334 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"16db325e85545e9bf9b0cf29b4e9d235e38fcaee588d66d7f2eb96751a168668"} Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.501640 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"59f05c65d914e1324f40c2afc931e2c934d2980e4fc2b6a2951e0efa9497b013"} Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.501666 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"80523d61261002462e6208aa41457966b7a125628f70a0e9048055e5c1dee68e"} Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.501963 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.524407 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.543004 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:58 crc kubenswrapper[4718]: E1124 08:37:58.543354 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-11-24 08:37:59.043343374 +0000 UTC m=+151.159634278 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.551460 4718 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.555586 4718 patch_prober.go:28] interesting pod/router-default-5444994796-jrkxw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 08:37:58 crc kubenswrapper[4718]: [-]has-synced failed: reason withheld Nov 24 08:37:58 crc kubenswrapper[4718]: [+]process-running ok Nov 24 08:37:58 crc kubenswrapper[4718]: healthz check failed Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.555643 4718 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jrkxw" podUID="47cf3417-8f00-44ea-82ca-5d60401f3754" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 08:37:58 crc kubenswrapper[4718]: W1124 08:37:58.561505 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbaad1930_cdc6_4c31_b707_b3f35554f711.slice/crio-5d669ebb0744bdf8baacd66bea0501f412d377002327418801db21738963a6c1 WatchSource:0}: Error finding container 5d669ebb0744bdf8baacd66bea0501f412d377002327418801db21738963a6c1: Status 404 returned error can't find the container with id 5d669ebb0744bdf8baacd66bea0501f412d377002327418801db21738963a6c1 Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.652028 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:58 crc kubenswrapper[4718]: E1124 08:37:58.653006 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 08:37:59.152985159 +0000 UTC m=+151.269276063 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.754025 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:58 crc kubenswrapper[4718]: E1124 08:37:58.754750 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 08:37:59.254733791 +0000 UTC m=+151.371024705 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-r2v9t" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.780291 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sk6hx" Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.784385 4718 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-11-24T08:37:58.551489214Z","Handler":null,"Name":""} Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.793858 4718 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.793895 4718 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.856563 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.864759 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.957810 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.961662 4718 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 24 08:37:58 crc kubenswrapper[4718]: I1124 08:37:58.961886 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.027300 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-r2v9t\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.049177 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.068622 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-w8rfg"] Nov 24 08:37:59 crc kubenswrapper[4718]: E1124 08:37:59.068865 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da69e256-e90c-4fde-94f6-d08522e2f3da" containerName="collect-profiles" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.068879 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="da69e256-e90c-4fde-94f6-d08522e2f3da" containerName="collect-profiles" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.069027 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="da69e256-e90c-4fde-94f6-d08522e2f3da" containerName="collect-profiles" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.069898 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w8rfg" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.097023 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.109471 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w8rfg"] Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.162438 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da69e256-e90c-4fde-94f6-d08522e2f3da-config-volume\") pod \"da69e256-e90c-4fde-94f6-d08522e2f3da\" (UID: \"da69e256-e90c-4fde-94f6-d08522e2f3da\") " Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.162535 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8slcq\" (UniqueName: \"kubernetes.io/projected/da69e256-e90c-4fde-94f6-d08522e2f3da-kube-api-access-8slcq\") pod \"da69e256-e90c-4fde-94f6-d08522e2f3da\" (UID: \"da69e256-e90c-4fde-94f6-d08522e2f3da\") " Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.162580 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/da69e256-e90c-4fde-94f6-d08522e2f3da-secret-volume\") pod \"da69e256-e90c-4fde-94f6-d08522e2f3da\" (UID: \"da69e256-e90c-4fde-94f6-d08522e2f3da\") " Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.162778 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-664lq\" (UniqueName: \"kubernetes.io/projected/39df5500-d2c6-4c61-be65-cc5598f8201d-kube-api-access-664lq\") pod \"redhat-marketplace-w8rfg\" (UID: \"39df5500-d2c6-4c61-be65-cc5598f8201d\") " pod="openshift-marketplace/redhat-marketplace-w8rfg" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.162843 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39df5500-d2c6-4c61-be65-cc5598f8201d-utilities\") pod \"redhat-marketplace-w8rfg\" (UID: \"39df5500-d2c6-4c61-be65-cc5598f8201d\") " pod="openshift-marketplace/redhat-marketplace-w8rfg" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.162868 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39df5500-d2c6-4c61-be65-cc5598f8201d-catalog-content\") pod \"redhat-marketplace-w8rfg\" (UID: \"39df5500-d2c6-4c61-be65-cc5598f8201d\") " pod="openshift-marketplace/redhat-marketplace-w8rfg" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.163796 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da69e256-e90c-4fde-94f6-d08522e2f3da-config-volume" (OuterVolumeSpecName: "config-volume") pod "da69e256-e90c-4fde-94f6-d08522e2f3da" (UID: "da69e256-e90c-4fde-94f6-d08522e2f3da"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.184730 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da69e256-e90c-4fde-94f6-d08522e2f3da-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "da69e256-e90c-4fde-94f6-d08522e2f3da" (UID: "da69e256-e90c-4fde-94f6-d08522e2f3da"). 
InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.209326 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da69e256-e90c-4fde-94f6-d08522e2f3da-kube-api-access-8slcq" (OuterVolumeSpecName: "kube-api-access-8slcq") pod "da69e256-e90c-4fde-94f6-d08522e2f3da" (UID: "da69e256-e90c-4fde-94f6-d08522e2f3da"). InnerVolumeSpecName "kube-api-access-8slcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.263830 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-664lq\" (UniqueName: \"kubernetes.io/projected/39df5500-d2c6-4c61-be65-cc5598f8201d-kube-api-access-664lq\") pod \"redhat-marketplace-w8rfg\" (UID: \"39df5500-d2c6-4c61-be65-cc5598f8201d\") " pod="openshift-marketplace/redhat-marketplace-w8rfg" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.263942 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39df5500-d2c6-4c61-be65-cc5598f8201d-utilities\") pod \"redhat-marketplace-w8rfg\" (UID: \"39df5500-d2c6-4c61-be65-cc5598f8201d\") " pod="openshift-marketplace/redhat-marketplace-w8rfg" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.263987 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39df5500-d2c6-4c61-be65-cc5598f8201d-catalog-content\") pod \"redhat-marketplace-w8rfg\" (UID: \"39df5500-d2c6-4c61-be65-cc5598f8201d\") " pod="openshift-marketplace/redhat-marketplace-w8rfg" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.264062 4718 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da69e256-e90c-4fde-94f6-d08522e2f3da-config-volume\") on node \"crc\" DevicePath \"\"" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.264078 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8slcq\" (UniqueName: \"kubernetes.io/projected/da69e256-e90c-4fde-94f6-d08522e2f3da-kube-api-access-8slcq\") on node \"crc\" DevicePath \"\"" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.264091 4718 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/da69e256-e90c-4fde-94f6-d08522e2f3da-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.264615 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39df5500-d2c6-4c61-be65-cc5598f8201d-catalog-content\") pod \"redhat-marketplace-w8rfg\" (UID: \"39df5500-d2c6-4c61-be65-cc5598f8201d\") " pod="openshift-marketplace/redhat-marketplace-w8rfg" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.264670 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39df5500-d2c6-4c61-be65-cc5598f8201d-utilities\") pod \"redhat-marketplace-w8rfg\" (UID: \"39df5500-d2c6-4c61-be65-cc5598f8201d\") " pod="openshift-marketplace/redhat-marketplace-w8rfg" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.278782 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.285010 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-664lq\" (UniqueName: \"kubernetes.io/projected/39df5500-d2c6-4c61-be65-cc5598f8201d-kube-api-access-664lq\") pod \"redhat-marketplace-w8rfg\" (UID: \"39df5500-d2c6-4c61-be65-cc5598f8201d\") " pod="openshift-marketplace/redhat-marketplace-w8rfg" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.389955 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.404872 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w8rfg" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.468184 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gwf8f"] Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.471423 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gwf8f" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.472468 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gwf8f"] Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.520692 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-r2v9t"] Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.536289 4718 generic.go:334] "Generic (PLEG): container finished" podID="0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16" containerID="3f73af9d5d4a5ed1e40e305540bca354d03cf203c9d548c4a06d724c38111dd9" exitCode=0 Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.536412 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qcdbm" event={"ID":"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16","Type":"ContainerDied","Data":"3f73af9d5d4a5ed1e40e305540bca354d03cf203c9d548c4a06d724c38111dd9"} Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.554996 4718 generic.go:334] "Generic (PLEG): container finished" podID="e4eeb5aa-31cb-4c3e-8045-8132ac10b348" containerID="a59ee3d3be05b598589c108616fa0961d8ea8da823061c581dc57cda2ef25f46" exitCode=0 Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.555063 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hg8cf" event={"ID":"e4eeb5aa-31cb-4c3e-8045-8132ac10b348","Type":"ContainerDied","Data":"a59ee3d3be05b598589c108616fa0961d8ea8da823061c581dc57cda2ef25f46"} Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.558864 4718 patch_prober.go:28] interesting pod/router-default-5444994796-jrkxw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 08:37:59 crc kubenswrapper[4718]: [-]has-synced failed: reason withheld Nov 24 08:37:59 crc kubenswrapper[4718]: [+]process-running ok Nov 24 08:37:59 crc kubenswrapper[4718]: healthz check failed Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.558910 4718 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jrkxw" podUID="47cf3417-8f00-44ea-82ca-5d60401f3754" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 
500" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.567187 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2881abce-fb45-42c4-895f-aaec41e8c4cc","Type":"ContainerStarted","Data":"ab3b34406e5fa76bae4936f3ae5f0c866d06938d1bc9ee57c961d2b3701f5c29"} Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.567805 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-catalog-content\") pod \"redhat-marketplace-gwf8f\" (UID: \"72a76ec8-c870-4c07-a703-a4ac3e9e97e8\") " pod="openshift-marketplace/redhat-marketplace-gwf8f" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.567834 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-utilities\") pod \"redhat-marketplace-gwf8f\" (UID: \"72a76ec8-c870-4c07-a703-a4ac3e9e97e8\") " pod="openshift-marketplace/redhat-marketplace-gwf8f" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.567854 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjjhh\" (UniqueName: \"kubernetes.io/projected/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-kube-api-access-fjjhh\") pod \"redhat-marketplace-gwf8f\" (UID: \"72a76ec8-c870-4c07-a703-a4ac3e9e97e8\") " pod="openshift-marketplace/redhat-marketplace-gwf8f" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.572047 4718 generic.go:334] "Generic (PLEG): container finished" podID="baad1930-cdc6-4c31-b707-b3f35554f711" containerID="229a4214fa6a6c6f5f30ea0c2a915b852eeca6d3f428d880d82b29754136bcb1" exitCode=0 Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.572156 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gl4gc" event={"ID":"baad1930-cdc6-4c31-b707-b3f35554f711","Type":"ContainerDied","Data":"229a4214fa6a6c6f5f30ea0c2a915b852eeca6d3f428d880d82b29754136bcb1"} Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.572187 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gl4gc" event={"ID":"baad1930-cdc6-4c31-b707-b3f35554f711","Type":"ContainerStarted","Data":"5d669ebb0744bdf8baacd66bea0501f412d377002327418801db21738963a6c1"} Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.579019 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.580211 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399550-lxjc5" event={"ID":"da69e256-e90c-4fde-94f6-d08522e2f3da","Type":"ContainerDied","Data":"967cd539aae2777f852c07864b7c1292ee29c9c32c7646910cf970ac7c4384ea"} Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.580252 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="967cd539aae2777f852c07864b7c1292ee29c9c32c7646910cf970ac7c4384ea" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.597276 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" event={"ID":"65bbc6c4-09ab-49f3-82fe-f277e38e6dde","Type":"ContainerStarted","Data":"e9939afb28b4d04a654d71a6beeb849804bf6821670f9190054ef79fed3a87ad"} Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.619774 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-s8r6x" podStartSLOduration=12.61971637 podStartE2EDuration="12.61971637s" podCreationTimestamp="2025-11-24 08:37:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:37:59.619626037 +0000 UTC m=+151.735916961" watchObservedRunningTime="2025-11-24 08:37:59.61971637 +0000 UTC m=+151.736007274" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.669359 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjjhh\" (UniqueName: \"kubernetes.io/projected/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-kube-api-access-fjjhh\") pod \"redhat-marketplace-gwf8f\" (UID: \"72a76ec8-c870-4c07-a703-a4ac3e9e97e8\") " pod="openshift-marketplace/redhat-marketplace-gwf8f" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.669572 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-catalog-content\") pod \"redhat-marketplace-gwf8f\" (UID: \"72a76ec8-c870-4c07-a703-a4ac3e9e97e8\") " pod="openshift-marketplace/redhat-marketplace-gwf8f" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.669737 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-utilities\") pod \"redhat-marketplace-gwf8f\" (UID: \"72a76ec8-c870-4c07-a703-a4ac3e9e97e8\") " pod="openshift-marketplace/redhat-marketplace-gwf8f" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.670388 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-utilities\") pod \"redhat-marketplace-gwf8f\" (UID: \"72a76ec8-c870-4c07-a703-a4ac3e9e97e8\") " pod="openshift-marketplace/redhat-marketplace-gwf8f" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.676133 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-catalog-content\") pod \"redhat-marketplace-gwf8f\" (UID: \"72a76ec8-c870-4c07-a703-a4ac3e9e97e8\") " pod="openshift-marketplace/redhat-marketplace-gwf8f" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 
08:37:59.703621 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjjhh\" (UniqueName: \"kubernetes.io/projected/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-kube-api-access-fjjhh\") pod \"redhat-marketplace-gwf8f\" (UID: \"72a76ec8-c870-4c07-a703-a4ac3e9e97e8\") " pod="openshift-marketplace/redhat-marketplace-gwf8f" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.707243 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w8rfg"] Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.741120 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.748323 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jmjcp" Nov 24 08:37:59 crc kubenswrapper[4718]: I1124 08:37:59.793369 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gwf8f" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.007627 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.016506 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-dwl5h" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.075873 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gwf8f"] Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.088245 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zpbbq"] Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.090429 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zpbbq" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.098137 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.103145 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zpbbq"] Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.127199 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.127229 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.163722 4718 patch_prober.go:28] interesting pod/console-f9d7485db-x2j5v container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.164212 4718 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-x2j5v" podUID="b17e4ccd-f166-4933-99ec-ef4a0445ef30" containerName="console" probeResult="failure" output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.186543 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-catalog-content\") pod \"redhat-operators-zpbbq\" (UID: \"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5\") " pod="openshift-marketplace/redhat-operators-zpbbq" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.186661 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jklfx\" (UniqueName: \"kubernetes.io/projected/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-kube-api-access-jklfx\") pod \"redhat-operators-zpbbq\" (UID: \"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5\") " pod="openshift-marketplace/redhat-operators-zpbbq" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.186696 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-utilities\") pod \"redhat-operators-zpbbq\" (UID: \"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5\") " pod="openshift-marketplace/redhat-operators-zpbbq" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.289963 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-catalog-content\") pod \"redhat-operators-zpbbq\" (UID: \"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5\") " pod="openshift-marketplace/redhat-operators-zpbbq" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.290330 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jklfx\" (UniqueName: \"kubernetes.io/projected/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-kube-api-access-jklfx\") pod \"redhat-operators-zpbbq\" (UID: \"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5\") " pod="openshift-marketplace/redhat-operators-zpbbq" Nov 24 08:38:00 crc kubenswrapper[4718]: 
I1124 08:38:00.290447 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-utilities\") pod \"redhat-operators-zpbbq\" (UID: \"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5\") " pod="openshift-marketplace/redhat-operators-zpbbq" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.293566 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-utilities\") pod \"redhat-operators-zpbbq\" (UID: \"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5\") " pod="openshift-marketplace/redhat-operators-zpbbq" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.293740 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-catalog-content\") pod \"redhat-operators-zpbbq\" (UID: \"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5\") " pod="openshift-marketplace/redhat-operators-zpbbq" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.319343 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jklfx\" (UniqueName: \"kubernetes.io/projected/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-kube-api-access-jklfx\") pod \"redhat-operators-zpbbq\" (UID: \"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5\") " pod="openshift-marketplace/redhat-operators-zpbbq" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.392467 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfsfw" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.454183 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pn2ck" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.465070 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zpbbq" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.465135 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lvfl4"] Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.466269 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lvfl4" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.488167 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lvfl4"] Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.511112 4718 patch_prober.go:28] interesting pod/downloads-7954f5f757-fw72r container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.511165 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-fw72r" podUID="15854168-726d-44b5-80e7-d1ca941c2941" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.511203 4718 patch_prober.go:28] interesting pod/downloads-7954f5f757-fw72r container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.511242 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-fw72r" podUID="15854168-726d-44b5-80e7-d1ca941c2941" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.532869 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-sqk2g" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.557254 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.569569 4718 patch_prober.go:28] interesting pod/router-default-5444994796-jrkxw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 08:38:00 crc kubenswrapper[4718]: [-]has-synced failed: reason withheld Nov 24 08:38:00 crc kubenswrapper[4718]: [+]process-running ok Nov 24 08:38:00 crc kubenswrapper[4718]: healthz check failed Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.569624 4718 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jrkxw" podUID="47cf3417-8f00-44ea-82ca-5d60401f3754" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.598229 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-catalog-content\") pod \"redhat-operators-lvfl4\" (UID: \"c66ac439-dc6b-46ee-9dd8-1488f7730fa6\") " pod="openshift-marketplace/redhat-operators-lvfl4" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.598285 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-utilities\") pod 
\"redhat-operators-lvfl4\" (UID: \"c66ac439-dc6b-46ee-9dd8-1488f7730fa6\") " pod="openshift-marketplace/redhat-operators-lvfl4" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.598310 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zmnr\" (UniqueName: \"kubernetes.io/projected/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-kube-api-access-2zmnr\") pod \"redhat-operators-lvfl4\" (UID: \"c66ac439-dc6b-46ee-9dd8-1488f7730fa6\") " pod="openshift-marketplace/redhat-operators-lvfl4" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.609470 4718 generic.go:334] "Generic (PLEG): container finished" podID="2881abce-fb45-42c4-895f-aaec41e8c4cc" containerID="785e78a27dbee3c37e25371ec9e4ec28bedbed0b9c09f8991ccee720bc40cc8e" exitCode=0 Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.616439 4718 generic.go:334] "Generic (PLEG): container finished" podID="72a76ec8-c870-4c07-a703-a4ac3e9e97e8" containerID="0a3c5cec93bbe6084b8aa02e12dfb827bed67e94471affc6a139d8bb74454e55" exitCode=0 Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.630153 4718 generic.go:334] "Generic (PLEG): container finished" podID="39df5500-d2c6-4c61-be65-cc5598f8201d" containerID="59d09d611f0cdc61e1aeeb9135ad9a73e4ec19be072acc457e95a9b485940763" exitCode=0 Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.632354 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.632958 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.641102 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.641127 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2881abce-fb45-42c4-895f-aaec41e8c4cc","Type":"ContainerDied","Data":"785e78a27dbee3c37e25371ec9e4ec28bedbed0b9c09f8991ccee720bc40cc8e"} Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.641151 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" event={"ID":"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb","Type":"ContainerStarted","Data":"161b52c8266a4dcfef56e10381ca93a99b88f2762d394670a7b8151112df21dd"} Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.641163 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" event={"ID":"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb","Type":"ContainerStarted","Data":"56766967d3a6adcccd33675227c51c5eb8ba73365c2060e81187296a97573e34"} Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.641172 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gwf8f" event={"ID":"72a76ec8-c870-4c07-a703-a4ac3e9e97e8","Type":"ContainerDied","Data":"0a3c5cec93bbe6084b8aa02e12dfb827bed67e94471affc6a139d8bb74454e55"} Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.641185 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gwf8f" 
event={"ID":"72a76ec8-c870-4c07-a703-a4ac3e9e97e8","Type":"ContainerStarted","Data":"83608858fc435fb1a9a4bba6757cc006e401f1d47abb361fc405bc7936dd8287"} Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.641195 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w8rfg" event={"ID":"39df5500-d2c6-4c61-be65-cc5598f8201d","Type":"ContainerDied","Data":"59d09d611f0cdc61e1aeeb9135ad9a73e4ec19be072acc457e95a9b485940763"} Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.641207 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w8rfg" event={"ID":"39df5500-d2c6-4c61-be65-cc5598f8201d","Type":"ContainerStarted","Data":"fb12512064c63d06fa696e87f97a2ee8086ee56e9cbb9e0fd0fe12521d5b059d"} Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.672338 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" podStartSLOduration=132.672319685 podStartE2EDuration="2m12.672319685s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:38:00.651490027 +0000 UTC m=+152.767780951" watchObservedRunningTime="2025-11-24 08:38:00.672319685 +0000 UTC m=+152.788610589" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.700631 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-catalog-content\") pod \"redhat-operators-lvfl4\" (UID: \"c66ac439-dc6b-46ee-9dd8-1488f7730fa6\") " pod="openshift-marketplace/redhat-operators-lvfl4" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.700710 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-utilities\") pod \"redhat-operators-lvfl4\" (UID: \"c66ac439-dc6b-46ee-9dd8-1488f7730fa6\") " pod="openshift-marketplace/redhat-operators-lvfl4" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.700744 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zmnr\" (UniqueName: \"kubernetes.io/projected/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-kube-api-access-2zmnr\") pod \"redhat-operators-lvfl4\" (UID: \"c66ac439-dc6b-46ee-9dd8-1488f7730fa6\") " pod="openshift-marketplace/redhat-operators-lvfl4" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.703944 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-catalog-content\") pod \"redhat-operators-lvfl4\" (UID: \"c66ac439-dc6b-46ee-9dd8-1488f7730fa6\") " pod="openshift-marketplace/redhat-operators-lvfl4" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.704747 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-utilities\") pod \"redhat-operators-lvfl4\" (UID: \"c66ac439-dc6b-46ee-9dd8-1488f7730fa6\") " pod="openshift-marketplace/redhat-operators-lvfl4" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.785592 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" Nov 24 08:38:00 crc 
kubenswrapper[4718]: I1124 08:38:00.793528 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zmnr\" (UniqueName: \"kubernetes.io/projected/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-kube-api-access-2zmnr\") pod \"redhat-operators-lvfl4\" (UID: \"c66ac439-dc6b-46ee-9dd8-1488f7730fa6\") " pod="openshift-marketplace/redhat-operators-lvfl4" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.846423 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lvfl4" Nov 24 08:38:00 crc kubenswrapper[4718]: I1124 08:38:00.942414 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zpbbq"] Nov 24 08:38:01 crc kubenswrapper[4718]: I1124 08:38:01.325761 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lvfl4"] Nov 24 08:38:01 crc kubenswrapper[4718]: I1124 08:38:01.562859 4718 patch_prober.go:28] interesting pod/router-default-5444994796-jrkxw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 08:38:01 crc kubenswrapper[4718]: [-]has-synced failed: reason withheld Nov 24 08:38:01 crc kubenswrapper[4718]: [+]process-running ok Nov 24 08:38:01 crc kubenswrapper[4718]: healthz check failed Nov 24 08:38:01 crc kubenswrapper[4718]: I1124 08:38:01.563241 4718 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jrkxw" podUID="47cf3417-8f00-44ea-82ca-5d60401f3754" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 08:38:01 crc kubenswrapper[4718]: I1124 08:38:01.692546 4718 generic.go:334] "Generic (PLEG): container finished" podID="3f4a042f-effa-4f5a-ac2d-4d378b0f15a5" containerID="8947bec93eb0601876a82be203112baad59ea498cd5b11c032e7e7d738c5559f" exitCode=0 Nov 24 08:38:01 crc kubenswrapper[4718]: I1124 08:38:01.692664 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zpbbq" event={"ID":"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5","Type":"ContainerDied","Data":"8947bec93eb0601876a82be203112baad59ea498cd5b11c032e7e7d738c5559f"} Nov 24 08:38:01 crc kubenswrapper[4718]: I1124 08:38:01.692699 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zpbbq" event={"ID":"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5","Type":"ContainerStarted","Data":"d46d7e368fac22d40f6dc30b541ba34fa08db64dbde864fffad45335dcf5c8ff"} Nov 24 08:38:01 crc kubenswrapper[4718]: I1124 08:38:01.700398 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lvfl4" event={"ID":"c66ac439-dc6b-46ee-9dd8-1488f7730fa6","Type":"ContainerStarted","Data":"4c776c0125c2cd0770c40ca631fcebe878db03300dcad007f871fbadf11cd655"} Nov 24 08:38:02 crc kubenswrapper[4718]: I1124 08:38:02.114722 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 08:38:02 crc kubenswrapper[4718]: I1124 08:38:02.243884 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2881abce-fb45-42c4-895f-aaec41e8c4cc-kube-api-access\") pod \"2881abce-fb45-42c4-895f-aaec41e8c4cc\" (UID: \"2881abce-fb45-42c4-895f-aaec41e8c4cc\") " Nov 24 08:38:02 crc kubenswrapper[4718]: I1124 08:38:02.244148 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2881abce-fb45-42c4-895f-aaec41e8c4cc-kubelet-dir\") pod \"2881abce-fb45-42c4-895f-aaec41e8c4cc\" (UID: \"2881abce-fb45-42c4-895f-aaec41e8c4cc\") " Nov 24 08:38:02 crc kubenswrapper[4718]: I1124 08:38:02.244305 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2881abce-fb45-42c4-895f-aaec41e8c4cc-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2881abce-fb45-42c4-895f-aaec41e8c4cc" (UID: "2881abce-fb45-42c4-895f-aaec41e8c4cc"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:38:02 crc kubenswrapper[4718]: I1124 08:38:02.244714 4718 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2881abce-fb45-42c4-895f-aaec41e8c4cc-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 24 08:38:02 crc kubenswrapper[4718]: I1124 08:38:02.250703 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2881abce-fb45-42c4-895f-aaec41e8c4cc-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2881abce-fb45-42c4-895f-aaec41e8c4cc" (UID: "2881abce-fb45-42c4-895f-aaec41e8c4cc"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:38:02 crc kubenswrapper[4718]: I1124 08:38:02.347650 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2881abce-fb45-42c4-895f-aaec41e8c4cc-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 08:38:02 crc kubenswrapper[4718]: I1124 08:38:02.554125 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:38:02 crc kubenswrapper[4718]: I1124 08:38:02.557454 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-jrkxw" Nov 24 08:38:02 crc kubenswrapper[4718]: I1124 08:38:02.729680 4718 generic.go:334] "Generic (PLEG): container finished" podID="c66ac439-dc6b-46ee-9dd8-1488f7730fa6" containerID="828e6ac4f5de745ff1391d490018cb3ddd128868a1b31c9af809e0d189358c00" exitCode=0 Nov 24 08:38:02 crc kubenswrapper[4718]: I1124 08:38:02.729795 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lvfl4" event={"ID":"c66ac439-dc6b-46ee-9dd8-1488f7730fa6","Type":"ContainerDied","Data":"828e6ac4f5de745ff1391d490018cb3ddd128868a1b31c9af809e0d189358c00"} Nov 24 08:38:02 crc kubenswrapper[4718]: I1124 08:38:02.734216 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 08:38:02 crc kubenswrapper[4718]: I1124 08:38:02.736007 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2881abce-fb45-42c4-895f-aaec41e8c4cc","Type":"ContainerDied","Data":"ab3b34406e5fa76bae4936f3ae5f0c866d06938d1bc9ee57c961d2b3701f5c29"} Nov 24 08:38:02 crc kubenswrapper[4718]: I1124 08:38:02.736069 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab3b34406e5fa76bae4936f3ae5f0c866d06938d1bc9ee57c961d2b3701f5c29" Nov 24 08:38:03 crc kubenswrapper[4718]: I1124 08:38:03.045617 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 24 08:38:03 crc kubenswrapper[4718]: E1124 08:38:03.046558 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2881abce-fb45-42c4-895f-aaec41e8c4cc" containerName="pruner" Nov 24 08:38:03 crc kubenswrapper[4718]: I1124 08:38:03.046574 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="2881abce-fb45-42c4-895f-aaec41e8c4cc" containerName="pruner" Nov 24 08:38:03 crc kubenswrapper[4718]: I1124 08:38:03.046683 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="2881abce-fb45-42c4-895f-aaec41e8c4cc" containerName="pruner" Nov 24 08:38:03 crc kubenswrapper[4718]: I1124 08:38:03.047056 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 08:38:03 crc kubenswrapper[4718]: I1124 08:38:03.050173 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Nov 24 08:38:03 crc kubenswrapper[4718]: I1124 08:38:03.050753 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Nov 24 08:38:03 crc kubenswrapper[4718]: I1124 08:38:03.060075 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 24 08:38:03 crc kubenswrapper[4718]: I1124 08:38:03.070868 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-c6s86" Nov 24 08:38:03 crc kubenswrapper[4718]: I1124 08:38:03.182805 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/74d63734-18c2-48c4-b472-bea0a1cb43e0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"74d63734-18c2-48c4-b472-bea0a1cb43e0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 08:38:03 crc kubenswrapper[4718]: I1124 08:38:03.182880 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/74d63734-18c2-48c4-b472-bea0a1cb43e0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"74d63734-18c2-48c4-b472-bea0a1cb43e0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 08:38:03 crc kubenswrapper[4718]: I1124 08:38:03.284870 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/74d63734-18c2-48c4-b472-bea0a1cb43e0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"74d63734-18c2-48c4-b472-bea0a1cb43e0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 08:38:03 crc kubenswrapper[4718]: I1124 08:38:03.284931 4718 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/74d63734-18c2-48c4-b472-bea0a1cb43e0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"74d63734-18c2-48c4-b472-bea0a1cb43e0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 08:38:03 crc kubenswrapper[4718]: I1124 08:38:03.285004 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/74d63734-18c2-48c4-b472-bea0a1cb43e0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"74d63734-18c2-48c4-b472-bea0a1cb43e0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 08:38:03 crc kubenswrapper[4718]: I1124 08:38:03.313241 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/74d63734-18c2-48c4-b472-bea0a1cb43e0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"74d63734-18c2-48c4-b472-bea0a1cb43e0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 08:38:03 crc kubenswrapper[4718]: I1124 08:38:03.390739 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 08:38:03 crc kubenswrapper[4718]: I1124 08:38:03.833457 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 24 08:38:03 crc kubenswrapper[4718]: W1124 08:38:03.896120 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod74d63734_18c2_48c4_b472_bea0a1cb43e0.slice/crio-fdaf745c83352d98b3a151ac41ffb5602879abb486b39bf9e7ebb13de4037263 WatchSource:0}: Error finding container fdaf745c83352d98b3a151ac41ffb5602879abb486b39bf9e7ebb13de4037263: Status 404 returned error can't find the container with id fdaf745c83352d98b3a151ac41ffb5602879abb486b39bf9e7ebb13de4037263 Nov 24 08:38:04 crc kubenswrapper[4718]: I1124 08:38:04.753401 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"74d63734-18c2-48c4-b472-bea0a1cb43e0","Type":"ContainerStarted","Data":"fdaf745c83352d98b3a151ac41ffb5602879abb486b39bf9e7ebb13de4037263"} Nov 24 08:38:05 crc kubenswrapper[4718]: I1124 08:38:05.774629 4718 generic.go:334] "Generic (PLEG): container finished" podID="74d63734-18c2-48c4-b472-bea0a1cb43e0" containerID="79e21fdfb670c92c9ed67fe73e2dc085f794b81278e429db7d0ccc7c89c552ef" exitCode=0 Nov 24 08:38:05 crc kubenswrapper[4718]: I1124 08:38:05.774805 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"74d63734-18c2-48c4-b472-bea0a1cb43e0","Type":"ContainerDied","Data":"79e21fdfb670c92c9ed67fe73e2dc085f794b81278e429db7d0ccc7c89c552ef"} Nov 24 08:38:10 crc kubenswrapper[4718]: I1124 08:38:10.134186 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:38:10 crc kubenswrapper[4718]: I1124 08:38:10.138869 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-x2j5v" Nov 24 08:38:10 crc kubenswrapper[4718]: I1124 08:38:10.230351 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs\") pod \"network-metrics-daemon-ctdmz\" (UID: 
\"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\") " pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:38:10 crc kubenswrapper[4718]: I1124 08:38:10.241952 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97-metrics-certs\") pod \"network-metrics-daemon-ctdmz\" (UID: \"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97\") " pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:38:10 crc kubenswrapper[4718]: I1124 08:38:10.417722 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ctdmz" Nov 24 08:38:10 crc kubenswrapper[4718]: I1124 08:38:10.508677 4718 patch_prober.go:28] interesting pod/downloads-7954f5f757-fw72r container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 24 08:38:10 crc kubenswrapper[4718]: I1124 08:38:10.508725 4718 patch_prober.go:28] interesting pod/downloads-7954f5f757-fw72r container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 24 08:38:10 crc kubenswrapper[4718]: I1124 08:38:10.508741 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-fw72r" podUID="15854168-726d-44b5-80e7-d1ca941c2941" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 24 08:38:10 crc kubenswrapper[4718]: I1124 08:38:10.508776 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-fw72r" podUID="15854168-726d-44b5-80e7-d1ca941c2941" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 24 08:38:19 crc kubenswrapper[4718]: I1124 08:38:19.284073 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:38:19 crc kubenswrapper[4718]: I1124 08:38:19.414876 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 08:38:19 crc kubenswrapper[4718]: I1124 08:38:19.504248 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/74d63734-18c2-48c4-b472-bea0a1cb43e0-kube-api-access\") pod \"74d63734-18c2-48c4-b472-bea0a1cb43e0\" (UID: \"74d63734-18c2-48c4-b472-bea0a1cb43e0\") " Nov 24 08:38:19 crc kubenswrapper[4718]: I1124 08:38:19.504581 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/74d63734-18c2-48c4-b472-bea0a1cb43e0-kubelet-dir\") pod \"74d63734-18c2-48c4-b472-bea0a1cb43e0\" (UID: \"74d63734-18c2-48c4-b472-bea0a1cb43e0\") " Nov 24 08:38:19 crc kubenswrapper[4718]: I1124 08:38:19.504641 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/74d63734-18c2-48c4-b472-bea0a1cb43e0-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "74d63734-18c2-48c4-b472-bea0a1cb43e0" (UID: "74d63734-18c2-48c4-b472-bea0a1cb43e0"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:38:19 crc kubenswrapper[4718]: I1124 08:38:19.504921 4718 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/74d63734-18c2-48c4-b472-bea0a1cb43e0-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 24 08:38:19 crc kubenswrapper[4718]: I1124 08:38:19.511195 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74d63734-18c2-48c4-b472-bea0a1cb43e0-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "74d63734-18c2-48c4-b472-bea0a1cb43e0" (UID: "74d63734-18c2-48c4-b472-bea0a1cb43e0"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:38:19 crc kubenswrapper[4718]: I1124 08:38:19.605767 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/74d63734-18c2-48c4-b472-bea0a1cb43e0-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 08:38:19 crc kubenswrapper[4718]: I1124 08:38:19.891616 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"74d63734-18c2-48c4-b472-bea0a1cb43e0","Type":"ContainerDied","Data":"fdaf745c83352d98b3a151ac41ffb5602879abb486b39bf9e7ebb13de4037263"} Nov 24 08:38:19 crc kubenswrapper[4718]: I1124 08:38:19.891661 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fdaf745c83352d98b3a151ac41ffb5602879abb486b39bf9e7ebb13de4037263" Nov 24 08:38:19 crc kubenswrapper[4718]: I1124 08:38:19.891710 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 08:38:20 crc kubenswrapper[4718]: I1124 08:38:20.508883 4718 patch_prober.go:28] interesting pod/downloads-7954f5f757-fw72r container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 24 08:38:20 crc kubenswrapper[4718]: I1124 08:38:20.508964 4718 patch_prober.go:28] interesting pod/downloads-7954f5f757-fw72r container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 24 08:38:20 crc kubenswrapper[4718]: I1124 08:38:20.509053 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-fw72r" podUID="15854168-726d-44b5-80e7-d1ca941c2941" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 24 08:38:20 crc kubenswrapper[4718]: I1124 08:38:20.508956 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-fw72r" podUID="15854168-726d-44b5-80e7-d1ca941c2941" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 24 08:38:20 crc kubenswrapper[4718]: I1124 08:38:20.509251 4718 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-fw72r" Nov 24 08:38:20 crc kubenswrapper[4718]: I1124 08:38:20.509933 4718 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" 
containerStatusID={"Type":"cri-o","ID":"f8e7a53e1eb4ae7fa090697dbe41dff5ba878bbd101ea0209d3e3f2f2dbf4492"} pod="openshift-console/downloads-7954f5f757-fw72r" containerMessage="Container download-server failed liveness probe, will be restarted" Nov 24 08:38:20 crc kubenswrapper[4718]: I1124 08:38:20.510060 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-fw72r" podUID="15854168-726d-44b5-80e7-d1ca941c2941" containerName="download-server" containerID="cri-o://f8e7a53e1eb4ae7fa090697dbe41dff5ba878bbd101ea0209d3e3f2f2dbf4492" gracePeriod=2 Nov 24 08:38:20 crc kubenswrapper[4718]: I1124 08:38:20.510190 4718 patch_prober.go:28] interesting pod/downloads-7954f5f757-fw72r container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 24 08:38:20 crc kubenswrapper[4718]: I1124 08:38:20.510261 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-fw72r" podUID="15854168-726d-44b5-80e7-d1ca941c2941" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 24 08:38:22 crc kubenswrapper[4718]: I1124 08:38:22.044949 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:38:22 crc kubenswrapper[4718]: I1124 08:38:22.045368 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:38:30 crc kubenswrapper[4718]: I1124 08:38:30.511310 4718 patch_prober.go:28] interesting pod/downloads-7954f5f757-fw72r container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 24 08:38:30 crc kubenswrapper[4718]: I1124 08:38:30.511876 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-fw72r" podUID="15854168-726d-44b5-80e7-d1ca941c2941" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 24 08:38:31 crc kubenswrapper[4718]: I1124 08:38:31.039801 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j77qb" Nov 24 08:38:31 crc kubenswrapper[4718]: E1124 08:38:31.314986 4718 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Nov 24 08:38:31 crc kubenswrapper[4718]: E1124 08:38:31.315167 4718 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nnll9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-qcdbm_openshift-marketplace(0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 08:38:31 crc kubenswrapper[4718]: E1124 08:38:31.317142 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-qcdbm" podUID="0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16" Nov 24 08:38:34 crc kubenswrapper[4718]: E1124 08:38:34.019643 4718 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Nov 24 08:38:34 crc kubenswrapper[4718]: E1124 08:38:34.019823 4718 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v8b9b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-hg8cf_openshift-marketplace(e4eeb5aa-31cb-4c3e-8045-8132ac10b348): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 08:38:34 crc kubenswrapper[4718]: E1124 08:38:34.021663 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-hg8cf" podUID="e4eeb5aa-31cb-4c3e-8045-8132ac10b348" Nov 24 08:38:37 crc kubenswrapper[4718]: I1124 08:38:37.097363 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 08:38:37 crc kubenswrapper[4718]: E1124 08:38:37.383815 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-qcdbm" podUID="0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16" Nov 24 08:38:37 crc kubenswrapper[4718]: E1124 08:38:37.384108 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-hg8cf" podUID="e4eeb5aa-31cb-4c3e-8045-8132ac10b348" Nov 24 08:38:37 crc kubenswrapper[4718]: I1124 08:38:37.979789 4718 generic.go:334] "Generic (PLEG): container finished" podID="15854168-726d-44b5-80e7-d1ca941c2941" containerID="f8e7a53e1eb4ae7fa090697dbe41dff5ba878bbd101ea0209d3e3f2f2dbf4492" exitCode=0 Nov 24 08:38:37 crc kubenswrapper[4718]: I1124 08:38:37.979893 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-fw72r" event={"ID":"15854168-726d-44b5-80e7-d1ca941c2941","Type":"ContainerDied","Data":"f8e7a53e1eb4ae7fa090697dbe41dff5ba878bbd101ea0209d3e3f2f2dbf4492"} Nov 24 08:38:38 crc 
kubenswrapper[4718]: E1124 08:38:38.264620 4718 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Nov 24 08:38:38 crc kubenswrapper[4718]: E1124 08:38:38.264784 4718 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fjjhh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-gwf8f_openshift-marketplace(72a76ec8-c870-4c07-a703-a4ac3e9e97e8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 08:38:38 crc kubenswrapper[4718]: E1124 08:38:38.266696 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-gwf8f" podUID="72a76ec8-c870-4c07-a703-a4ac3e9e97e8" Nov 24 08:38:40 crc kubenswrapper[4718]: I1124 08:38:40.508521 4718 patch_prober.go:28] interesting pod/downloads-7954f5f757-fw72r container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 24 08:38:40 crc kubenswrapper[4718]: I1124 08:38:40.508581 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-fw72r" podUID="15854168-726d-44b5-80e7-d1ca941c2941" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 24 08:38:42 crc kubenswrapper[4718]: E1124 08:38:42.166476 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-gwf8f" podUID="72a76ec8-c870-4c07-a703-a4ac3e9e97e8" Nov 24 08:38:44 crc kubenswrapper[4718]: E1124 08:38:44.799162 4718 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Nov 24 08:38:44 crc kubenswrapper[4718]: E1124 08:38:44.799362 4718 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jklfx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-zpbbq_openshift-marketplace(3f4a042f-effa-4f5a-ac2d-4d378b0f15a5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 08:38:44 crc kubenswrapper[4718]: E1124 08:38:44.800667 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-zpbbq" podUID="3f4a042f-effa-4f5a-ac2d-4d378b0f15a5" Nov 24 08:38:45 crc kubenswrapper[4718]: E1124 08:38:45.745547 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-zpbbq" podUID="3f4a042f-effa-4f5a-ac2d-4d378b0f15a5" Nov 24 08:38:46 crc kubenswrapper[4718]: I1124 08:38:46.172014 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-ctdmz"] Nov 24 08:38:46 crc kubenswrapper[4718]: W1124 08:38:46.189900 4718 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3d58e0ed_5dd0_4e0a_aed2_34dc42b30b97.slice/crio-f1b9d1579a34c07cdf411daa2ed6223e42821deb54d97f65d70430480f3c758b WatchSource:0}: Error finding container f1b9d1579a34c07cdf411daa2ed6223e42821deb54d97f65d70430480f3c758b: Status 404 returned error can't find the container with id f1b9d1579a34c07cdf411daa2ed6223e42821deb54d97f65d70430480f3c758b Nov 24 08:38:46 crc kubenswrapper[4718]: E1124 08:38:46.192773 4718 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Nov 24 08:38:46 crc kubenswrapper[4718]: E1124 08:38:46.192861 4718 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2zmnr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-lvfl4_openshift-marketplace(c66ac439-dc6b-46ee-9dd8-1488f7730fa6): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 08:38:46 crc kubenswrapper[4718]: E1124 08:38:46.194071 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-lvfl4" podUID="c66ac439-dc6b-46ee-9dd8-1488f7730fa6" Nov 24 08:38:46 crc kubenswrapper[4718]: E1124 08:38:46.313986 4718 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Nov 24 08:38:46 crc kubenswrapper[4718]: E1124 08:38:46.314454 4718 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-664lq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-w8rfg_openshift-marketplace(39df5500-d2c6-4c61-be65-cc5598f8201d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 08:38:46 crc kubenswrapper[4718]: E1124 08:38:46.315721 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-w8rfg" podUID="39df5500-d2c6-4c61-be65-cc5598f8201d" Nov 24 08:38:46 crc kubenswrapper[4718]: E1124 08:38:46.501745 4718 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Nov 24 08:38:46 crc kubenswrapper[4718]: E1124 08:38:46.501891 4718 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lwl8p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-9zgmp_openshift-marketplace(44264beb-2b14-47a9-9da4-18ca5e19d282): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 08:38:46 crc kubenswrapper[4718]: E1124 08:38:46.503059 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-9zgmp" podUID="44264beb-2b14-47a9-9da4-18ca5e19d282" Nov 24 08:38:47 crc kubenswrapper[4718]: I1124 08:38:47.038584 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-fw72r" event={"ID":"15854168-726d-44b5-80e7-d1ca941c2941","Type":"ContainerStarted","Data":"3d9339635f7e322ebe8606b120178b3e88c558411e3a087256900232aea4d85d"} Nov 24 08:38:47 crc kubenswrapper[4718]: I1124 08:38:47.038956 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-fw72r" Nov 24 08:38:47 crc kubenswrapper[4718]: I1124 08:38:47.039508 4718 patch_prober.go:28] interesting pod/downloads-7954f5f757-fw72r container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 24 08:38:47 crc kubenswrapper[4718]: I1124 08:38:47.039558 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-fw72r" podUID="15854168-726d-44b5-80e7-d1ca941c2941" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 24 08:38:47 crc kubenswrapper[4718]: I1124 08:38:47.041581 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" event={"ID":"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97","Type":"ContainerStarted","Data":"44b9ae36a27e338277ab6c8e9e4b9de58a4b354fc72bbaaace9e9c36644e6552"} Nov 24 08:38:47 crc kubenswrapper[4718]: I1124 
08:38:47.041642 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" event={"ID":"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97","Type":"ContainerStarted","Data":"f1b9d1579a34c07cdf411daa2ed6223e42821deb54d97f65d70430480f3c758b"} Nov 24 08:38:47 crc kubenswrapper[4718]: E1124 08:38:47.042723 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-lvfl4" podUID="c66ac439-dc6b-46ee-9dd8-1488f7730fa6" Nov 24 08:38:47 crc kubenswrapper[4718]: E1124 08:38:47.043836 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-9zgmp" podUID="44264beb-2b14-47a9-9da4-18ca5e19d282" Nov 24 08:38:47 crc kubenswrapper[4718]: E1124 08:38:47.044127 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-w8rfg" podUID="39df5500-d2c6-4c61-be65-cc5598f8201d" Nov 24 08:38:48 crc kubenswrapper[4718]: I1124 08:38:48.051475 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ctdmz" event={"ID":"3d58e0ed-5dd0-4e0a-aed2-34dc42b30b97","Type":"ContainerStarted","Data":"2250ba45784b323bd6c7aa0edf802531e70e7c7110964087e2335f17105564d5"} Nov 24 08:38:48 crc kubenswrapper[4718]: I1124 08:38:48.052022 4718 patch_prober.go:28] interesting pod/downloads-7954f5f757-fw72r container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 24 08:38:48 crc kubenswrapper[4718]: I1124 08:38:48.052058 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-fw72r" podUID="15854168-726d-44b5-80e7-d1ca941c2941" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 24 08:38:48 crc kubenswrapper[4718]: E1124 08:38:48.165610 4718 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Nov 24 08:38:48 crc kubenswrapper[4718]: E1124 08:38:48.165770 4718 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wvjnt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-gl4gc_openshift-marketplace(baad1930-cdc6-4c31-b707-b3f35554f711): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 08:38:48 crc kubenswrapper[4718]: E1124 08:38:48.167102 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-gl4gc" podUID="baad1930-cdc6-4c31-b707-b3f35554f711" Nov 24 08:38:49 crc kubenswrapper[4718]: E1124 08:38:49.057636 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-gl4gc" podUID="baad1930-cdc6-4c31-b707-b3f35554f711" Nov 24 08:38:49 crc kubenswrapper[4718]: I1124 08:38:49.089068 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-ctdmz" podStartSLOduration=181.089045921 podStartE2EDuration="3m1.089045921s" podCreationTimestamp="2025-11-24 08:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:38:49.069700059 +0000 UTC m=+201.185990983" watchObservedRunningTime="2025-11-24 08:38:49.089045921 +0000 UTC m=+201.205336845" Nov 24 08:38:50 crc kubenswrapper[4718]: I1124 08:38:50.508844 4718 patch_prober.go:28] interesting pod/downloads-7954f5f757-fw72r container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 24 08:38:50 crc kubenswrapper[4718]: I1124 08:38:50.509126 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-fw72r" podUID="15854168-726d-44b5-80e7-d1ca941c2941" containerName="download-server" probeResult="failure" 
output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 24 08:38:50 crc kubenswrapper[4718]: I1124 08:38:50.508957 4718 patch_prober.go:28] interesting pod/downloads-7954f5f757-fw72r container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 24 08:38:50 crc kubenswrapper[4718]: I1124 08:38:50.509177 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-fw72r" podUID="15854168-726d-44b5-80e7-d1ca941c2941" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 24 08:38:52 crc kubenswrapper[4718]: I1124 08:38:52.045949 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:38:52 crc kubenswrapper[4718]: I1124 08:38:52.046064 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:38:52 crc kubenswrapper[4718]: I1124 08:38:52.046159 4718 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:38:52 crc kubenswrapper[4718]: I1124 08:38:52.047147 4718 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67"} pod="openshift-machine-config-operator/machine-config-daemon-575gl" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 08:38:52 crc kubenswrapper[4718]: I1124 08:38:52.047228 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" containerID="cri-o://c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67" gracePeriod=600 Nov 24 08:38:53 crc kubenswrapper[4718]: I1124 08:38:53.079165 4718 generic.go:334] "Generic (PLEG): container finished" podID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerID="c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67" exitCode=0 Nov 24 08:38:53 crc kubenswrapper[4718]: I1124 08:38:53.079249 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerDied","Data":"c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67"} Nov 24 08:38:54 crc kubenswrapper[4718]: I1124 08:38:54.087652 4718 generic.go:334] "Generic (PLEG): container finished" podID="0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16" containerID="d97d600f47a7452b23da1602adbb3845dad0b7e3477101c2fd89dc66e5ab6f16" exitCode=0 Nov 24 08:38:54 crc kubenswrapper[4718]: I1124 08:38:54.087868 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-qcdbm" event={"ID":"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16","Type":"ContainerDied","Data":"d97d600f47a7452b23da1602adbb3845dad0b7e3477101c2fd89dc66e5ab6f16"} Nov 24 08:38:54 crc kubenswrapper[4718]: I1124 08:38:54.095119 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerStarted","Data":"3a7ca29b97a51852d552c1a2d7e2bdb50bf9e50e07b800355266295362166198"} Nov 24 08:38:55 crc kubenswrapper[4718]: I1124 08:38:55.101724 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qcdbm" event={"ID":"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16","Type":"ContainerStarted","Data":"8c9ccfa7d475d090583a91d4871f3dfecdaa41f6b6eccb1193c8dbaaa34d2bf1"} Nov 24 08:38:55 crc kubenswrapper[4718]: I1124 08:38:55.103390 4718 generic.go:334] "Generic (PLEG): container finished" podID="e4eeb5aa-31cb-4c3e-8045-8132ac10b348" containerID="45067df5f269d4815740ece6d73337bf1cee5e2124774babbb9051b4c3dd0365" exitCode=0 Nov 24 08:38:55 crc kubenswrapper[4718]: I1124 08:38:55.103433 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hg8cf" event={"ID":"e4eeb5aa-31cb-4c3e-8045-8132ac10b348","Type":"ContainerDied","Data":"45067df5f269d4815740ece6d73337bf1cee5e2124774babbb9051b4c3dd0365"} Nov 24 08:38:55 crc kubenswrapper[4718]: I1124 08:38:55.105286 4718 generic.go:334] "Generic (PLEG): container finished" podID="72a76ec8-c870-4c07-a703-a4ac3e9e97e8" containerID="57d1d709e96628e057dda0662183a7ecd5f33d974ea53b13d350769b8c835f67" exitCode=0 Nov 24 08:38:55 crc kubenswrapper[4718]: I1124 08:38:55.105315 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gwf8f" event={"ID":"72a76ec8-c870-4c07-a703-a4ac3e9e97e8","Type":"ContainerDied","Data":"57d1d709e96628e057dda0662183a7ecd5f33d974ea53b13d350769b8c835f67"} Nov 24 08:38:55 crc kubenswrapper[4718]: I1124 08:38:55.121078 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qcdbm" podStartSLOduration=4.17055062 podStartE2EDuration="59.121060462s" podCreationTimestamp="2025-11-24 08:37:56 +0000 UTC" firstStartedPulling="2025-11-24 08:37:59.552312947 +0000 UTC m=+151.668603851" lastFinishedPulling="2025-11-24 08:38:54.502822799 +0000 UTC m=+206.619113693" observedRunningTime="2025-11-24 08:38:55.117180915 +0000 UTC m=+207.233471829" watchObservedRunningTime="2025-11-24 08:38:55.121060462 +0000 UTC m=+207.237351366" Nov 24 08:38:56 crc kubenswrapper[4718]: I1124 08:38:56.113556 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gwf8f" event={"ID":"72a76ec8-c870-4c07-a703-a4ac3e9e97e8","Type":"ContainerStarted","Data":"6a25c694cf4a4c898d116bc6bcb996f2bdaa5917a5290e3aaedb8130ed37bc69"} Nov 24 08:38:56 crc kubenswrapper[4718]: I1124 08:38:56.115953 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hg8cf" event={"ID":"e4eeb5aa-31cb-4c3e-8045-8132ac10b348","Type":"ContainerStarted","Data":"eb3a276ba3de9580d3e6644add1ee910dc25e600e802e829c4b5381c05881e14"} Nov 24 08:38:56 crc kubenswrapper[4718]: I1124 08:38:56.141122 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gwf8f" podStartSLOduration=2.01958666 podStartE2EDuration="57.141105771s" 
podCreationTimestamp="2025-11-24 08:37:59 +0000 UTC" firstStartedPulling="2025-11-24 08:38:00.618496205 +0000 UTC m=+152.734787109" lastFinishedPulling="2025-11-24 08:38:55.740015316 +0000 UTC m=+207.856306220" observedRunningTime="2025-11-24 08:38:56.139044584 +0000 UTC m=+208.255335508" watchObservedRunningTime="2025-11-24 08:38:56.141105771 +0000 UTC m=+208.257396665" Nov 24 08:38:57 crc kubenswrapper[4718]: I1124 08:38:57.392081 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qcdbm" Nov 24 08:38:57 crc kubenswrapper[4718]: I1124 08:38:57.392583 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qcdbm" Nov 24 08:38:57 crc kubenswrapper[4718]: I1124 08:38:57.539339 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qcdbm" Nov 24 08:38:57 crc kubenswrapper[4718]: I1124 08:38:57.557825 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hg8cf" podStartSLOduration=3.361895215 podStartE2EDuration="1m0.557805572s" podCreationTimestamp="2025-11-24 08:37:57 +0000 UTC" firstStartedPulling="2025-11-24 08:37:58.492313193 +0000 UTC m=+150.608604097" lastFinishedPulling="2025-11-24 08:38:55.68822355 +0000 UTC m=+207.804514454" observedRunningTime="2025-11-24 08:38:56.158992433 +0000 UTC m=+208.275283347" watchObservedRunningTime="2025-11-24 08:38:57.557805572 +0000 UTC m=+209.674096486" Nov 24 08:38:57 crc kubenswrapper[4718]: I1124 08:38:57.663319 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hg8cf" Nov 24 08:38:57 crc kubenswrapper[4718]: I1124 08:38:57.664089 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hg8cf" Nov 24 08:38:57 crc kubenswrapper[4718]: I1124 08:38:57.700905 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hg8cf" Nov 24 08:38:59 crc kubenswrapper[4718]: I1124 08:38:59.795158 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gwf8f" Nov 24 08:38:59 crc kubenswrapper[4718]: I1124 08:38:59.797907 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gwf8f" Nov 24 08:38:59 crc kubenswrapper[4718]: I1124 08:38:59.845480 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gwf8f" Nov 24 08:39:00 crc kubenswrapper[4718]: I1124 08:39:00.190141 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gwf8f" Nov 24 08:39:00 crc kubenswrapper[4718]: I1124 08:39:00.514237 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-fw72r" Nov 24 08:39:02 crc kubenswrapper[4718]: I1124 08:39:02.152521 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zpbbq" event={"ID":"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5","Type":"ContainerStarted","Data":"c95977bb249805c70a7e91414b7d8b00d41ae009536656e2a728acfb33127017"} Nov 24 08:39:02 crc kubenswrapper[4718]: I1124 08:39:02.155561 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-lvfl4" event={"ID":"c66ac439-dc6b-46ee-9dd8-1488f7730fa6","Type":"ContainerStarted","Data":"3ee7010f5ab770e7d5b5deacabb10027da818996fe24234e36644b014c57e1b3"} Nov 24 08:39:02 crc kubenswrapper[4718]: I1124 08:39:02.157094 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9zgmp" event={"ID":"44264beb-2b14-47a9-9da4-18ca5e19d282","Type":"ContainerStarted","Data":"58fc23eba20db52c1c741efcee838beef24cfaeea7e065704fb98e89aa81dc15"} Nov 24 08:39:02 crc kubenswrapper[4718]: I1124 08:39:02.424600 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gwf8f"] Nov 24 08:39:02 crc kubenswrapper[4718]: I1124 08:39:02.424831 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gwf8f" podUID="72a76ec8-c870-4c07-a703-a4ac3e9e97e8" containerName="registry-server" containerID="cri-o://6a25c694cf4a4c898d116bc6bcb996f2bdaa5917a5290e3aaedb8130ed37bc69" gracePeriod=2 Nov 24 08:39:02 crc kubenswrapper[4718]: I1124 08:39:02.865658 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gwf8f" Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.054798 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjjhh\" (UniqueName: \"kubernetes.io/projected/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-kube-api-access-fjjhh\") pod \"72a76ec8-c870-4c07-a703-a4ac3e9e97e8\" (UID: \"72a76ec8-c870-4c07-a703-a4ac3e9e97e8\") " Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.055275 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-catalog-content\") pod \"72a76ec8-c870-4c07-a703-a4ac3e9e97e8\" (UID: \"72a76ec8-c870-4c07-a703-a4ac3e9e97e8\") " Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.055348 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-utilities\") pod \"72a76ec8-c870-4c07-a703-a4ac3e9e97e8\" (UID: \"72a76ec8-c870-4c07-a703-a4ac3e9e97e8\") " Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.056277 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-utilities" (OuterVolumeSpecName: "utilities") pod "72a76ec8-c870-4c07-a703-a4ac3e9e97e8" (UID: "72a76ec8-c870-4c07-a703-a4ac3e9e97e8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.063916 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-kube-api-access-fjjhh" (OuterVolumeSpecName: "kube-api-access-fjjhh") pod "72a76ec8-c870-4c07-a703-a4ac3e9e97e8" (UID: "72a76ec8-c870-4c07-a703-a4ac3e9e97e8"). InnerVolumeSpecName "kube-api-access-fjjhh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.072773 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "72a76ec8-c870-4c07-a703-a4ac3e9e97e8" (UID: "72a76ec8-c870-4c07-a703-a4ac3e9e97e8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.156260 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.156298 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjjhh\" (UniqueName: \"kubernetes.io/projected/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-kube-api-access-fjjhh\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.156310 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72a76ec8-c870-4c07-a703-a4ac3e9e97e8-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.163870 4718 generic.go:334] "Generic (PLEG): container finished" podID="39df5500-d2c6-4c61-be65-cc5598f8201d" containerID="d447947dd9bedfe752e905efc1fbbc4e59f644c1f781e1ccbf52c6e10ff7a59f" exitCode=0 Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.163956 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w8rfg" event={"ID":"39df5500-d2c6-4c61-be65-cc5598f8201d","Type":"ContainerDied","Data":"d447947dd9bedfe752e905efc1fbbc4e59f644c1f781e1ccbf52c6e10ff7a59f"} Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.165754 4718 generic.go:334] "Generic (PLEG): container finished" podID="c66ac439-dc6b-46ee-9dd8-1488f7730fa6" containerID="3ee7010f5ab770e7d5b5deacabb10027da818996fe24234e36644b014c57e1b3" exitCode=0 Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.165809 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lvfl4" event={"ID":"c66ac439-dc6b-46ee-9dd8-1488f7730fa6","Type":"ContainerDied","Data":"3ee7010f5ab770e7d5b5deacabb10027da818996fe24234e36644b014c57e1b3"} Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.167238 4718 generic.go:334] "Generic (PLEG): container finished" podID="44264beb-2b14-47a9-9da4-18ca5e19d282" containerID="58fc23eba20db52c1c741efcee838beef24cfaeea7e065704fb98e89aa81dc15" exitCode=0 Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.167269 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9zgmp" event={"ID":"44264beb-2b14-47a9-9da4-18ca5e19d282","Type":"ContainerDied","Data":"58fc23eba20db52c1c741efcee838beef24cfaeea7e065704fb98e89aa81dc15"} Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.170610 4718 generic.go:334] "Generic (PLEG): container finished" podID="3f4a042f-effa-4f5a-ac2d-4d378b0f15a5" containerID="c95977bb249805c70a7e91414b7d8b00d41ae009536656e2a728acfb33127017" exitCode=0 Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.170726 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zpbbq" 
event={"ID":"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5","Type":"ContainerDied","Data":"c95977bb249805c70a7e91414b7d8b00d41ae009536656e2a728acfb33127017"} Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.179888 4718 generic.go:334] "Generic (PLEG): container finished" podID="72a76ec8-c870-4c07-a703-a4ac3e9e97e8" containerID="6a25c694cf4a4c898d116bc6bcb996f2bdaa5917a5290e3aaedb8130ed37bc69" exitCode=0 Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.179923 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gwf8f" event={"ID":"72a76ec8-c870-4c07-a703-a4ac3e9e97e8","Type":"ContainerDied","Data":"6a25c694cf4a4c898d116bc6bcb996f2bdaa5917a5290e3aaedb8130ed37bc69"} Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.179951 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gwf8f" event={"ID":"72a76ec8-c870-4c07-a703-a4ac3e9e97e8","Type":"ContainerDied","Data":"83608858fc435fb1a9a4bba6757cc006e401f1d47abb361fc405bc7936dd8287"} Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.179985 4718 scope.go:117] "RemoveContainer" containerID="6a25c694cf4a4c898d116bc6bcb996f2bdaa5917a5290e3aaedb8130ed37bc69" Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.180129 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gwf8f" Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.206079 4718 scope.go:117] "RemoveContainer" containerID="57d1d709e96628e057dda0662183a7ecd5f33d974ea53b13d350769b8c835f67" Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.241154 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gwf8f"] Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.243402 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gwf8f"] Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.244054 4718 scope.go:117] "RemoveContainer" containerID="0a3c5cec93bbe6084b8aa02e12dfb827bed67e94471affc6a139d8bb74454e55" Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.255742 4718 scope.go:117] "RemoveContainer" containerID="6a25c694cf4a4c898d116bc6bcb996f2bdaa5917a5290e3aaedb8130ed37bc69" Nov 24 08:39:03 crc kubenswrapper[4718]: E1124 08:39:03.256265 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a25c694cf4a4c898d116bc6bcb996f2bdaa5917a5290e3aaedb8130ed37bc69\": container with ID starting with 6a25c694cf4a4c898d116bc6bcb996f2bdaa5917a5290e3aaedb8130ed37bc69 not found: ID does not exist" containerID="6a25c694cf4a4c898d116bc6bcb996f2bdaa5917a5290e3aaedb8130ed37bc69" Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.256307 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a25c694cf4a4c898d116bc6bcb996f2bdaa5917a5290e3aaedb8130ed37bc69"} err="failed to get container status \"6a25c694cf4a4c898d116bc6bcb996f2bdaa5917a5290e3aaedb8130ed37bc69\": rpc error: code = NotFound desc = could not find container \"6a25c694cf4a4c898d116bc6bcb996f2bdaa5917a5290e3aaedb8130ed37bc69\": container with ID starting with 6a25c694cf4a4c898d116bc6bcb996f2bdaa5917a5290e3aaedb8130ed37bc69 not found: ID does not exist" Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.256334 4718 scope.go:117] "RemoveContainer" containerID="57d1d709e96628e057dda0662183a7ecd5f33d974ea53b13d350769b8c835f67" Nov 24 
08:39:03 crc kubenswrapper[4718]: E1124 08:39:03.256752 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57d1d709e96628e057dda0662183a7ecd5f33d974ea53b13d350769b8c835f67\": container with ID starting with 57d1d709e96628e057dda0662183a7ecd5f33d974ea53b13d350769b8c835f67 not found: ID does not exist" containerID="57d1d709e96628e057dda0662183a7ecd5f33d974ea53b13d350769b8c835f67" Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.256790 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57d1d709e96628e057dda0662183a7ecd5f33d974ea53b13d350769b8c835f67"} err="failed to get container status \"57d1d709e96628e057dda0662183a7ecd5f33d974ea53b13d350769b8c835f67\": rpc error: code = NotFound desc = could not find container \"57d1d709e96628e057dda0662183a7ecd5f33d974ea53b13d350769b8c835f67\": container with ID starting with 57d1d709e96628e057dda0662183a7ecd5f33d974ea53b13d350769b8c835f67 not found: ID does not exist" Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.256813 4718 scope.go:117] "RemoveContainer" containerID="0a3c5cec93bbe6084b8aa02e12dfb827bed67e94471affc6a139d8bb74454e55" Nov 24 08:39:03 crc kubenswrapper[4718]: E1124 08:39:03.257313 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a3c5cec93bbe6084b8aa02e12dfb827bed67e94471affc6a139d8bb74454e55\": container with ID starting with 0a3c5cec93bbe6084b8aa02e12dfb827bed67e94471affc6a139d8bb74454e55 not found: ID does not exist" containerID="0a3c5cec93bbe6084b8aa02e12dfb827bed67e94471affc6a139d8bb74454e55" Nov 24 08:39:03 crc kubenswrapper[4718]: I1124 08:39:03.257335 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a3c5cec93bbe6084b8aa02e12dfb827bed67e94471affc6a139d8bb74454e55"} err="failed to get container status \"0a3c5cec93bbe6084b8aa02e12dfb827bed67e94471affc6a139d8bb74454e55\": rpc error: code = NotFound desc = could not find container \"0a3c5cec93bbe6084b8aa02e12dfb827bed67e94471affc6a139d8bb74454e55\": container with ID starting with 0a3c5cec93bbe6084b8aa02e12dfb827bed67e94471affc6a139d8bb74454e55 not found: ID does not exist" Nov 24 08:39:04 crc kubenswrapper[4718]: I1124 08:39:04.603443 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72a76ec8-c870-4c07-a703-a4ac3e9e97e8" path="/var/lib/kubelet/pods/72a76ec8-c870-4c07-a703-a4ac3e9e97e8/volumes" Nov 24 08:39:05 crc kubenswrapper[4718]: I1124 08:39:05.192131 4718 generic.go:334] "Generic (PLEG): container finished" podID="baad1930-cdc6-4c31-b707-b3f35554f711" containerID="d0663cbbe92f6821fd606f986b58e04167aa8043853c85a34712ea422c3daa73" exitCode=0 Nov 24 08:39:05 crc kubenswrapper[4718]: I1124 08:39:05.192325 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gl4gc" event={"ID":"baad1930-cdc6-4c31-b707-b3f35554f711","Type":"ContainerDied","Data":"d0663cbbe92f6821fd606f986b58e04167aa8043853c85a34712ea422c3daa73"} Nov 24 08:39:05 crc kubenswrapper[4718]: I1124 08:39:05.195494 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w8rfg" event={"ID":"39df5500-d2c6-4c61-be65-cc5598f8201d","Type":"ContainerStarted","Data":"df9fec50a376b40d04ce584c85cd9480a0dc77d378793ce49686185e5b9af75d"} Nov 24 08:39:05 crc kubenswrapper[4718]: I1124 08:39:05.222957 4718 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-marketplace/redhat-marketplace-w8rfg" podStartSLOduration=2.713161609 podStartE2EDuration="1m6.222938523s" podCreationTimestamp="2025-11-24 08:37:59 +0000 UTC" firstStartedPulling="2025-11-24 08:38:00.633016585 +0000 UTC m=+152.749307489" lastFinishedPulling="2025-11-24 08:39:04.142793499 +0000 UTC m=+216.259084403" observedRunningTime="2025-11-24 08:39:05.220535407 +0000 UTC m=+217.336826321" watchObservedRunningTime="2025-11-24 08:39:05.222938523 +0000 UTC m=+217.339229427" Nov 24 08:39:07 crc kubenswrapper[4718]: I1124 08:39:07.206777 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9zgmp" event={"ID":"44264beb-2b14-47a9-9da4-18ca5e19d282","Type":"ContainerStarted","Data":"66436d056290897fb2eec3859e284233c54d45823fc35ac04bf5781c559ed14f"} Nov 24 08:39:07 crc kubenswrapper[4718]: I1124 08:39:07.225314 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9zgmp" podStartSLOduration=2.738622633 podStartE2EDuration="1m10.225295071s" podCreationTimestamp="2025-11-24 08:37:57 +0000 UTC" firstStartedPulling="2025-11-24 08:37:58.449916616 +0000 UTC m=+150.566207520" lastFinishedPulling="2025-11-24 08:39:05.936589054 +0000 UTC m=+218.052879958" observedRunningTime="2025-11-24 08:39:07.223587814 +0000 UTC m=+219.339878718" watchObservedRunningTime="2025-11-24 08:39:07.225295071 +0000 UTC m=+219.341585965" Nov 24 08:39:07 crc kubenswrapper[4718]: I1124 08:39:07.434313 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9zgmp" Nov 24 08:39:07 crc kubenswrapper[4718]: I1124 08:39:07.434384 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9zgmp" Nov 24 08:39:07 crc kubenswrapper[4718]: I1124 08:39:07.663402 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qcdbm" Nov 24 08:39:07 crc kubenswrapper[4718]: I1124 08:39:07.712761 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hg8cf" Nov 24 08:39:08 crc kubenswrapper[4718]: I1124 08:39:08.467260 4718 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-9zgmp" podUID="44264beb-2b14-47a9-9da4-18ca5e19d282" containerName="registry-server" probeResult="failure" output=< Nov 24 08:39:08 crc kubenswrapper[4718]: timeout: failed to connect service ":50051" within 1s Nov 24 08:39:08 crc kubenswrapper[4718]: > Nov 24 08:39:09 crc kubenswrapper[4718]: I1124 08:39:09.405722 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-w8rfg" Nov 24 08:39:09 crc kubenswrapper[4718]: I1124 08:39:09.405775 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-w8rfg" Nov 24 08:39:09 crc kubenswrapper[4718]: I1124 08:39:09.452126 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xjcpp"] Nov 24 08:39:09 crc kubenswrapper[4718]: I1124 08:39:09.454987 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-w8rfg" Nov 24 08:39:10 crc kubenswrapper[4718]: I1124 08:39:10.259079 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-marketplace-w8rfg" Nov 24 08:39:10 crc kubenswrapper[4718]: I1124 08:39:10.823833 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hg8cf"] Nov 24 08:39:10 crc kubenswrapper[4718]: I1124 08:39:10.824673 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hg8cf" podUID="e4eeb5aa-31cb-4c3e-8045-8132ac10b348" containerName="registry-server" containerID="cri-o://eb3a276ba3de9580d3e6644add1ee910dc25e600e802e829c4b5381c05881e14" gracePeriod=2 Nov 24 08:39:11 crc kubenswrapper[4718]: I1124 08:39:11.228135 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lvfl4" event={"ID":"c66ac439-dc6b-46ee-9dd8-1488f7730fa6","Type":"ContainerStarted","Data":"c3e4da15783a02bf1ac359d611f11e95cc9ea3f3b693dabdd6f63d818ee47423"} Nov 24 08:39:12 crc kubenswrapper[4718]: I1124 08:39:12.234164 4718 generic.go:334] "Generic (PLEG): container finished" podID="e4eeb5aa-31cb-4c3e-8045-8132ac10b348" containerID="eb3a276ba3de9580d3e6644add1ee910dc25e600e802e829c4b5381c05881e14" exitCode=0 Nov 24 08:39:12 crc kubenswrapper[4718]: I1124 08:39:12.234256 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hg8cf" event={"ID":"e4eeb5aa-31cb-4c3e-8045-8132ac10b348","Type":"ContainerDied","Data":"eb3a276ba3de9580d3e6644add1ee910dc25e600e802e829c4b5381c05881e14"} Nov 24 08:39:12 crc kubenswrapper[4718]: I1124 08:39:12.252014 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lvfl4" podStartSLOduration=4.737684152 podStartE2EDuration="1m12.251959618s" podCreationTimestamp="2025-11-24 08:38:00 +0000 UTC" firstStartedPulling="2025-11-24 08:38:02.736154956 +0000 UTC m=+154.852445860" lastFinishedPulling="2025-11-24 08:39:10.250430422 +0000 UTC m=+222.366721326" observedRunningTime="2025-11-24 08:39:12.251395162 +0000 UTC m=+224.367686066" watchObservedRunningTime="2025-11-24 08:39:12.251959618 +0000 UTC m=+224.368250522" Nov 24 08:39:12 crc kubenswrapper[4718]: I1124 08:39:12.687802 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hg8cf" Nov 24 08:39:12 crc kubenswrapper[4718]: I1124 08:39:12.775508 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-catalog-content\") pod \"e4eeb5aa-31cb-4c3e-8045-8132ac10b348\" (UID: \"e4eeb5aa-31cb-4c3e-8045-8132ac10b348\") " Nov 24 08:39:12 crc kubenswrapper[4718]: I1124 08:39:12.775634 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8b9b\" (UniqueName: \"kubernetes.io/projected/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-kube-api-access-v8b9b\") pod \"e4eeb5aa-31cb-4c3e-8045-8132ac10b348\" (UID: \"e4eeb5aa-31cb-4c3e-8045-8132ac10b348\") " Nov 24 08:39:12 crc kubenswrapper[4718]: I1124 08:39:12.775661 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-utilities\") pod \"e4eeb5aa-31cb-4c3e-8045-8132ac10b348\" (UID: \"e4eeb5aa-31cb-4c3e-8045-8132ac10b348\") " Nov 24 08:39:12 crc kubenswrapper[4718]: I1124 08:39:12.776512 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-utilities" (OuterVolumeSpecName: "utilities") pod "e4eeb5aa-31cb-4c3e-8045-8132ac10b348" (UID: "e4eeb5aa-31cb-4c3e-8045-8132ac10b348"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:39:12 crc kubenswrapper[4718]: I1124 08:39:12.784506 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-kube-api-access-v8b9b" (OuterVolumeSpecName: "kube-api-access-v8b9b") pod "e4eeb5aa-31cb-4c3e-8045-8132ac10b348" (UID: "e4eeb5aa-31cb-4c3e-8045-8132ac10b348"). InnerVolumeSpecName "kube-api-access-v8b9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:39:12 crc kubenswrapper[4718]: I1124 08:39:12.819917 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e4eeb5aa-31cb-4c3e-8045-8132ac10b348" (UID: "e4eeb5aa-31cb-4c3e-8045-8132ac10b348"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:39:12 crc kubenswrapper[4718]: I1124 08:39:12.877046 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8b9b\" (UniqueName: \"kubernetes.io/projected/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-kube-api-access-v8b9b\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:12 crc kubenswrapper[4718]: I1124 08:39:12.877090 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:12 crc kubenswrapper[4718]: I1124 08:39:12.877103 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb5aa-31cb-4c3e-8045-8132ac10b348-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:13 crc kubenswrapper[4718]: I1124 08:39:13.241318 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hg8cf" Nov 24 08:39:13 crc kubenswrapper[4718]: I1124 08:39:13.241216 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hg8cf" event={"ID":"e4eeb5aa-31cb-4c3e-8045-8132ac10b348","Type":"ContainerDied","Data":"b35c90010c5540aa38fd4d8b03f64fa133e2053da097596968d95f94afb89848"} Nov 24 08:39:13 crc kubenswrapper[4718]: I1124 08:39:13.242124 4718 scope.go:117] "RemoveContainer" containerID="eb3a276ba3de9580d3e6644add1ee910dc25e600e802e829c4b5381c05881e14" Nov 24 08:39:13 crc kubenswrapper[4718]: I1124 08:39:13.256071 4718 scope.go:117] "RemoveContainer" containerID="45067df5f269d4815740ece6d73337bf1cee5e2124774babbb9051b4c3dd0365" Nov 24 08:39:13 crc kubenswrapper[4718]: I1124 08:39:13.264381 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hg8cf"] Nov 24 08:39:13 crc kubenswrapper[4718]: I1124 08:39:13.268479 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hg8cf"] Nov 24 08:39:13 crc kubenswrapper[4718]: I1124 08:39:13.284570 4718 scope.go:117] "RemoveContainer" containerID="a59ee3d3be05b598589c108616fa0961d8ea8da823061c581dc57cda2ef25f46" Nov 24 08:39:14 crc kubenswrapper[4718]: I1124 08:39:14.248354 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zpbbq" event={"ID":"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5","Type":"ContainerStarted","Data":"5c76ce6432a5bb91e4314caffe626fb0319a70d68b99387a1f95fde0e60321b1"} Nov 24 08:39:14 crc kubenswrapper[4718]: I1124 08:39:14.264319 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zpbbq" podStartSLOduration=3.168241511 podStartE2EDuration="1m14.26430527s" podCreationTimestamp="2025-11-24 08:38:00 +0000 UTC" firstStartedPulling="2025-11-24 08:38:01.695312053 +0000 UTC m=+153.811602957" lastFinishedPulling="2025-11-24 08:39:12.791375822 +0000 UTC m=+224.907666716" observedRunningTime="2025-11-24 08:39:14.263632732 +0000 UTC m=+226.379923636" watchObservedRunningTime="2025-11-24 08:39:14.26430527 +0000 UTC m=+226.380596174" Nov 24 08:39:14 crc kubenswrapper[4718]: I1124 08:39:14.604542 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4eeb5aa-31cb-4c3e-8045-8132ac10b348" path="/var/lib/kubelet/pods/e4eeb5aa-31cb-4c3e-8045-8132ac10b348/volumes" Nov 24 08:39:16 crc kubenswrapper[4718]: I1124 08:39:16.261919 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gl4gc" event={"ID":"baad1930-cdc6-4c31-b707-b3f35554f711","Type":"ContainerStarted","Data":"07d3d4ccd755ab2376a3c895860d1fd9ed0f7f4cf2d0c0dd0155d50fdc9d6c15"} Nov 24 08:39:16 crc kubenswrapper[4718]: I1124 08:39:16.283536 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gl4gc" podStartSLOduration=3.644487161 podStartE2EDuration="1m19.283519033s" podCreationTimestamp="2025-11-24 08:37:57 +0000 UTC" firstStartedPulling="2025-11-24 08:37:59.579564356 +0000 UTC m=+151.695855260" lastFinishedPulling="2025-11-24 08:39:15.218596228 +0000 UTC m=+227.334887132" observedRunningTime="2025-11-24 08:39:16.280747757 +0000 UTC m=+228.397038671" watchObservedRunningTime="2025-11-24 08:39:16.283519033 +0000 UTC m=+228.399809937" Nov 24 08:39:17 crc kubenswrapper[4718]: I1124 08:39:17.468461 4718 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-marketplace/community-operators-9zgmp" Nov 24 08:39:17 crc kubenswrapper[4718]: I1124 08:39:17.506388 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9zgmp" Nov 24 08:39:17 crc kubenswrapper[4718]: I1124 08:39:17.859728 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gl4gc" Nov 24 08:39:17 crc kubenswrapper[4718]: I1124 08:39:17.859798 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gl4gc" Nov 24 08:39:18 crc kubenswrapper[4718]: I1124 08:39:18.894573 4718 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-gl4gc" podUID="baad1930-cdc6-4c31-b707-b3f35554f711" containerName="registry-server" probeResult="failure" output=< Nov 24 08:39:18 crc kubenswrapper[4718]: timeout: failed to connect service ":50051" within 1s Nov 24 08:39:18 crc kubenswrapper[4718]: > Nov 24 08:39:20 crc kubenswrapper[4718]: I1124 08:39:20.466723 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zpbbq" Nov 24 08:39:20 crc kubenswrapper[4718]: I1124 08:39:20.467098 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zpbbq" Nov 24 08:39:20 crc kubenswrapper[4718]: I1124 08:39:20.508101 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zpbbq" Nov 24 08:39:20 crc kubenswrapper[4718]: I1124 08:39:20.847104 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lvfl4" Nov 24 08:39:20 crc kubenswrapper[4718]: I1124 08:39:20.847154 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lvfl4" Nov 24 08:39:20 crc kubenswrapper[4718]: I1124 08:39:20.883042 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lvfl4" Nov 24 08:39:21 crc kubenswrapper[4718]: I1124 08:39:21.316910 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zpbbq" Nov 24 08:39:21 crc kubenswrapper[4718]: I1124 08:39:21.317214 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lvfl4" Nov 24 08:39:22 crc kubenswrapper[4718]: I1124 08:39:22.824553 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lvfl4"] Nov 24 08:39:23 crc kubenswrapper[4718]: I1124 08:39:23.291153 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lvfl4" podUID="c66ac439-dc6b-46ee-9dd8-1488f7730fa6" containerName="registry-server" containerID="cri-o://c3e4da15783a02bf1ac359d611f11e95cc9ea3f3b693dabdd6f63d818ee47423" gracePeriod=2 Nov 24 08:39:23 crc kubenswrapper[4718]: I1124 08:39:23.600351 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lvfl4" Nov 24 08:39:23 crc kubenswrapper[4718]: I1124 08:39:23.709263 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-catalog-content\") pod \"c66ac439-dc6b-46ee-9dd8-1488f7730fa6\" (UID: \"c66ac439-dc6b-46ee-9dd8-1488f7730fa6\") " Nov 24 08:39:23 crc kubenswrapper[4718]: I1124 08:39:23.709352 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-utilities\") pod \"c66ac439-dc6b-46ee-9dd8-1488f7730fa6\" (UID: \"c66ac439-dc6b-46ee-9dd8-1488f7730fa6\") " Nov 24 08:39:23 crc kubenswrapper[4718]: I1124 08:39:23.709399 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zmnr\" (UniqueName: \"kubernetes.io/projected/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-kube-api-access-2zmnr\") pod \"c66ac439-dc6b-46ee-9dd8-1488f7730fa6\" (UID: \"c66ac439-dc6b-46ee-9dd8-1488f7730fa6\") " Nov 24 08:39:23 crc kubenswrapper[4718]: I1124 08:39:23.710291 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-utilities" (OuterVolumeSpecName: "utilities") pod "c66ac439-dc6b-46ee-9dd8-1488f7730fa6" (UID: "c66ac439-dc6b-46ee-9dd8-1488f7730fa6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:39:23 crc kubenswrapper[4718]: I1124 08:39:23.714467 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-kube-api-access-2zmnr" (OuterVolumeSpecName: "kube-api-access-2zmnr") pod "c66ac439-dc6b-46ee-9dd8-1488f7730fa6" (UID: "c66ac439-dc6b-46ee-9dd8-1488f7730fa6"). InnerVolumeSpecName "kube-api-access-2zmnr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:39:23 crc kubenswrapper[4718]: I1124 08:39:23.789933 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c66ac439-dc6b-46ee-9dd8-1488f7730fa6" (UID: "c66ac439-dc6b-46ee-9dd8-1488f7730fa6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:39:23 crc kubenswrapper[4718]: I1124 08:39:23.811124 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:23 crc kubenswrapper[4718]: I1124 08:39:23.811184 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zmnr\" (UniqueName: \"kubernetes.io/projected/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-kube-api-access-2zmnr\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:23 crc kubenswrapper[4718]: I1124 08:39:23.811195 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c66ac439-dc6b-46ee-9dd8-1488f7730fa6-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:24 crc kubenswrapper[4718]: I1124 08:39:24.296782 4718 generic.go:334] "Generic (PLEG): container finished" podID="c66ac439-dc6b-46ee-9dd8-1488f7730fa6" containerID="c3e4da15783a02bf1ac359d611f11e95cc9ea3f3b693dabdd6f63d818ee47423" exitCode=0 Nov 24 08:39:24 crc kubenswrapper[4718]: I1124 08:39:24.296847 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lvfl4" Nov 24 08:39:24 crc kubenswrapper[4718]: I1124 08:39:24.296838 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lvfl4" event={"ID":"c66ac439-dc6b-46ee-9dd8-1488f7730fa6","Type":"ContainerDied","Data":"c3e4da15783a02bf1ac359d611f11e95cc9ea3f3b693dabdd6f63d818ee47423"} Nov 24 08:39:24 crc kubenswrapper[4718]: I1124 08:39:24.297001 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lvfl4" event={"ID":"c66ac439-dc6b-46ee-9dd8-1488f7730fa6","Type":"ContainerDied","Data":"4c776c0125c2cd0770c40ca631fcebe878db03300dcad007f871fbadf11cd655"} Nov 24 08:39:24 crc kubenswrapper[4718]: I1124 08:39:24.297021 4718 scope.go:117] "RemoveContainer" containerID="c3e4da15783a02bf1ac359d611f11e95cc9ea3f3b693dabdd6f63d818ee47423" Nov 24 08:39:24 crc kubenswrapper[4718]: I1124 08:39:24.310471 4718 scope.go:117] "RemoveContainer" containerID="3ee7010f5ab770e7d5b5deacabb10027da818996fe24234e36644b014c57e1b3" Nov 24 08:39:24 crc kubenswrapper[4718]: I1124 08:39:24.322411 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lvfl4"] Nov 24 08:39:24 crc kubenswrapper[4718]: I1124 08:39:24.325867 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lvfl4"] Nov 24 08:39:24 crc kubenswrapper[4718]: I1124 08:39:24.347244 4718 scope.go:117] "RemoveContainer" containerID="828e6ac4f5de745ff1391d490018cb3ddd128868a1b31c9af809e0d189358c00" Nov 24 08:39:24 crc kubenswrapper[4718]: I1124 08:39:24.362417 4718 scope.go:117] "RemoveContainer" containerID="c3e4da15783a02bf1ac359d611f11e95cc9ea3f3b693dabdd6f63d818ee47423" Nov 24 08:39:24 crc kubenswrapper[4718]: E1124 08:39:24.362936 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3e4da15783a02bf1ac359d611f11e95cc9ea3f3b693dabdd6f63d818ee47423\": container with ID starting with c3e4da15783a02bf1ac359d611f11e95cc9ea3f3b693dabdd6f63d818ee47423 not found: ID does not exist" containerID="c3e4da15783a02bf1ac359d611f11e95cc9ea3f3b693dabdd6f63d818ee47423" Nov 24 08:39:24 crc kubenswrapper[4718]: I1124 08:39:24.363025 4718 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3e4da15783a02bf1ac359d611f11e95cc9ea3f3b693dabdd6f63d818ee47423"} err="failed to get container status \"c3e4da15783a02bf1ac359d611f11e95cc9ea3f3b693dabdd6f63d818ee47423\": rpc error: code = NotFound desc = could not find container \"c3e4da15783a02bf1ac359d611f11e95cc9ea3f3b693dabdd6f63d818ee47423\": container with ID starting with c3e4da15783a02bf1ac359d611f11e95cc9ea3f3b693dabdd6f63d818ee47423 not found: ID does not exist" Nov 24 08:39:24 crc kubenswrapper[4718]: I1124 08:39:24.363056 4718 scope.go:117] "RemoveContainer" containerID="3ee7010f5ab770e7d5b5deacabb10027da818996fe24234e36644b014c57e1b3" Nov 24 08:39:24 crc kubenswrapper[4718]: E1124 08:39:24.363429 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ee7010f5ab770e7d5b5deacabb10027da818996fe24234e36644b014c57e1b3\": container with ID starting with 3ee7010f5ab770e7d5b5deacabb10027da818996fe24234e36644b014c57e1b3 not found: ID does not exist" containerID="3ee7010f5ab770e7d5b5deacabb10027da818996fe24234e36644b014c57e1b3" Nov 24 08:39:24 crc kubenswrapper[4718]: I1124 08:39:24.363461 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ee7010f5ab770e7d5b5deacabb10027da818996fe24234e36644b014c57e1b3"} err="failed to get container status \"3ee7010f5ab770e7d5b5deacabb10027da818996fe24234e36644b014c57e1b3\": rpc error: code = NotFound desc = could not find container \"3ee7010f5ab770e7d5b5deacabb10027da818996fe24234e36644b014c57e1b3\": container with ID starting with 3ee7010f5ab770e7d5b5deacabb10027da818996fe24234e36644b014c57e1b3 not found: ID does not exist" Nov 24 08:39:24 crc kubenswrapper[4718]: I1124 08:39:24.363486 4718 scope.go:117] "RemoveContainer" containerID="828e6ac4f5de745ff1391d490018cb3ddd128868a1b31c9af809e0d189358c00" Nov 24 08:39:24 crc kubenswrapper[4718]: E1124 08:39:24.363735 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"828e6ac4f5de745ff1391d490018cb3ddd128868a1b31c9af809e0d189358c00\": container with ID starting with 828e6ac4f5de745ff1391d490018cb3ddd128868a1b31c9af809e0d189358c00 not found: ID does not exist" containerID="828e6ac4f5de745ff1391d490018cb3ddd128868a1b31c9af809e0d189358c00" Nov 24 08:39:24 crc kubenswrapper[4718]: I1124 08:39:24.363757 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"828e6ac4f5de745ff1391d490018cb3ddd128868a1b31c9af809e0d189358c00"} err="failed to get container status \"828e6ac4f5de745ff1391d490018cb3ddd128868a1b31c9af809e0d189358c00\": rpc error: code = NotFound desc = could not find container \"828e6ac4f5de745ff1391d490018cb3ddd128868a1b31c9af809e0d189358c00\": container with ID starting with 828e6ac4f5de745ff1391d490018cb3ddd128868a1b31c9af809e0d189358c00 not found: ID does not exist" Nov 24 08:39:24 crc kubenswrapper[4718]: I1124 08:39:24.602104 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c66ac439-dc6b-46ee-9dd8-1488f7730fa6" path="/var/lib/kubelet/pods/c66ac439-dc6b-46ee-9dd8-1488f7730fa6/volumes" Nov 24 08:39:27 crc kubenswrapper[4718]: I1124 08:39:27.901090 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gl4gc" Nov 24 08:39:27 crc kubenswrapper[4718]: I1124 08:39:27.935875 4718 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gl4gc" Nov 24 08:39:28 crc kubenswrapper[4718]: I1124 08:39:28.630062 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gl4gc"] Nov 24 08:39:29 crc kubenswrapper[4718]: I1124 08:39:29.324671 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gl4gc" podUID="baad1930-cdc6-4c31-b707-b3f35554f711" containerName="registry-server" containerID="cri-o://07d3d4ccd755ab2376a3c895860d1fd9ed0f7f4cf2d0c0dd0155d50fdc9d6c15" gracePeriod=2 Nov 24 08:39:29 crc kubenswrapper[4718]: I1124 08:39:29.691726 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gl4gc" Nov 24 08:39:29 crc kubenswrapper[4718]: I1124 08:39:29.798136 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baad1930-cdc6-4c31-b707-b3f35554f711-utilities\") pod \"baad1930-cdc6-4c31-b707-b3f35554f711\" (UID: \"baad1930-cdc6-4c31-b707-b3f35554f711\") " Nov 24 08:39:29 crc kubenswrapper[4718]: I1124 08:39:29.798233 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvjnt\" (UniqueName: \"kubernetes.io/projected/baad1930-cdc6-4c31-b707-b3f35554f711-kube-api-access-wvjnt\") pod \"baad1930-cdc6-4c31-b707-b3f35554f711\" (UID: \"baad1930-cdc6-4c31-b707-b3f35554f711\") " Nov 24 08:39:29 crc kubenswrapper[4718]: I1124 08:39:29.798291 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baad1930-cdc6-4c31-b707-b3f35554f711-catalog-content\") pod \"baad1930-cdc6-4c31-b707-b3f35554f711\" (UID: \"baad1930-cdc6-4c31-b707-b3f35554f711\") " Nov 24 08:39:29 crc kubenswrapper[4718]: I1124 08:39:29.799092 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/baad1930-cdc6-4c31-b707-b3f35554f711-utilities" (OuterVolumeSpecName: "utilities") pod "baad1930-cdc6-4c31-b707-b3f35554f711" (UID: "baad1930-cdc6-4c31-b707-b3f35554f711"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:39:29 crc kubenswrapper[4718]: I1124 08:39:29.808269 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/baad1930-cdc6-4c31-b707-b3f35554f711-kube-api-access-wvjnt" (OuterVolumeSpecName: "kube-api-access-wvjnt") pod "baad1930-cdc6-4c31-b707-b3f35554f711" (UID: "baad1930-cdc6-4c31-b707-b3f35554f711"). InnerVolumeSpecName "kube-api-access-wvjnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:39:29 crc kubenswrapper[4718]: I1124 08:39:29.850371 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/baad1930-cdc6-4c31-b707-b3f35554f711-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "baad1930-cdc6-4c31-b707-b3f35554f711" (UID: "baad1930-cdc6-4c31-b707-b3f35554f711"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:39:29 crc kubenswrapper[4718]: I1124 08:39:29.899795 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baad1930-cdc6-4c31-b707-b3f35554f711-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:29 crc kubenswrapper[4718]: I1124 08:39:29.899832 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baad1930-cdc6-4c31-b707-b3f35554f711-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:29 crc kubenswrapper[4718]: I1124 08:39:29.899843 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvjnt\" (UniqueName: \"kubernetes.io/projected/baad1930-cdc6-4c31-b707-b3f35554f711-kube-api-access-wvjnt\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:30 crc kubenswrapper[4718]: I1124 08:39:30.332348 4718 generic.go:334] "Generic (PLEG): container finished" podID="baad1930-cdc6-4c31-b707-b3f35554f711" containerID="07d3d4ccd755ab2376a3c895860d1fd9ed0f7f4cf2d0c0dd0155d50fdc9d6c15" exitCode=0 Nov 24 08:39:30 crc kubenswrapper[4718]: I1124 08:39:30.332501 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gl4gc" Nov 24 08:39:30 crc kubenswrapper[4718]: I1124 08:39:30.332504 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gl4gc" event={"ID":"baad1930-cdc6-4c31-b707-b3f35554f711","Type":"ContainerDied","Data":"07d3d4ccd755ab2376a3c895860d1fd9ed0f7f4cf2d0c0dd0155d50fdc9d6c15"} Nov 24 08:39:30 crc kubenswrapper[4718]: I1124 08:39:30.332612 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gl4gc" event={"ID":"baad1930-cdc6-4c31-b707-b3f35554f711","Type":"ContainerDied","Data":"5d669ebb0744bdf8baacd66bea0501f412d377002327418801db21738963a6c1"} Nov 24 08:39:30 crc kubenswrapper[4718]: I1124 08:39:30.332634 4718 scope.go:117] "RemoveContainer" containerID="07d3d4ccd755ab2376a3c895860d1fd9ed0f7f4cf2d0c0dd0155d50fdc9d6c15" Nov 24 08:39:30 crc kubenswrapper[4718]: I1124 08:39:30.346851 4718 scope.go:117] "RemoveContainer" containerID="d0663cbbe92f6821fd606f986b58e04167aa8043853c85a34712ea422c3daa73" Nov 24 08:39:30 crc kubenswrapper[4718]: I1124 08:39:30.360341 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gl4gc"] Nov 24 08:39:30 crc kubenswrapper[4718]: I1124 08:39:30.364767 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gl4gc"] Nov 24 08:39:30 crc kubenswrapper[4718]: I1124 08:39:30.364902 4718 scope.go:117] "RemoveContainer" containerID="229a4214fa6a6c6f5f30ea0c2a915b852eeca6d3f428d880d82b29754136bcb1" Nov 24 08:39:30 crc kubenswrapper[4718]: I1124 08:39:30.393902 4718 scope.go:117] "RemoveContainer" containerID="07d3d4ccd755ab2376a3c895860d1fd9ed0f7f4cf2d0c0dd0155d50fdc9d6c15" Nov 24 08:39:30 crc kubenswrapper[4718]: E1124 08:39:30.394718 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07d3d4ccd755ab2376a3c895860d1fd9ed0f7f4cf2d0c0dd0155d50fdc9d6c15\": container with ID starting with 07d3d4ccd755ab2376a3c895860d1fd9ed0f7f4cf2d0c0dd0155d50fdc9d6c15 not found: ID does not exist" containerID="07d3d4ccd755ab2376a3c895860d1fd9ed0f7f4cf2d0c0dd0155d50fdc9d6c15" Nov 24 08:39:30 crc kubenswrapper[4718]: I1124 08:39:30.394771 
4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07d3d4ccd755ab2376a3c895860d1fd9ed0f7f4cf2d0c0dd0155d50fdc9d6c15"} err="failed to get container status \"07d3d4ccd755ab2376a3c895860d1fd9ed0f7f4cf2d0c0dd0155d50fdc9d6c15\": rpc error: code = NotFound desc = could not find container \"07d3d4ccd755ab2376a3c895860d1fd9ed0f7f4cf2d0c0dd0155d50fdc9d6c15\": container with ID starting with 07d3d4ccd755ab2376a3c895860d1fd9ed0f7f4cf2d0c0dd0155d50fdc9d6c15 not found: ID does not exist" Nov 24 08:39:30 crc kubenswrapper[4718]: I1124 08:39:30.394800 4718 scope.go:117] "RemoveContainer" containerID="d0663cbbe92f6821fd606f986b58e04167aa8043853c85a34712ea422c3daa73" Nov 24 08:39:30 crc kubenswrapper[4718]: E1124 08:39:30.395354 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0663cbbe92f6821fd606f986b58e04167aa8043853c85a34712ea422c3daa73\": container with ID starting with d0663cbbe92f6821fd606f986b58e04167aa8043853c85a34712ea422c3daa73 not found: ID does not exist" containerID="d0663cbbe92f6821fd606f986b58e04167aa8043853c85a34712ea422c3daa73" Nov 24 08:39:30 crc kubenswrapper[4718]: I1124 08:39:30.395393 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0663cbbe92f6821fd606f986b58e04167aa8043853c85a34712ea422c3daa73"} err="failed to get container status \"d0663cbbe92f6821fd606f986b58e04167aa8043853c85a34712ea422c3daa73\": rpc error: code = NotFound desc = could not find container \"d0663cbbe92f6821fd606f986b58e04167aa8043853c85a34712ea422c3daa73\": container with ID starting with d0663cbbe92f6821fd606f986b58e04167aa8043853c85a34712ea422c3daa73 not found: ID does not exist" Nov 24 08:39:30 crc kubenswrapper[4718]: I1124 08:39:30.395406 4718 scope.go:117] "RemoveContainer" containerID="229a4214fa6a6c6f5f30ea0c2a915b852eeca6d3f428d880d82b29754136bcb1" Nov 24 08:39:30 crc kubenswrapper[4718]: E1124 08:39:30.395746 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"229a4214fa6a6c6f5f30ea0c2a915b852eeca6d3f428d880d82b29754136bcb1\": container with ID starting with 229a4214fa6a6c6f5f30ea0c2a915b852eeca6d3f428d880d82b29754136bcb1 not found: ID does not exist" containerID="229a4214fa6a6c6f5f30ea0c2a915b852eeca6d3f428d880d82b29754136bcb1" Nov 24 08:39:30 crc kubenswrapper[4718]: I1124 08:39:30.395776 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"229a4214fa6a6c6f5f30ea0c2a915b852eeca6d3f428d880d82b29754136bcb1"} err="failed to get container status \"229a4214fa6a6c6f5f30ea0c2a915b852eeca6d3f428d880d82b29754136bcb1\": rpc error: code = NotFound desc = could not find container \"229a4214fa6a6c6f5f30ea0c2a915b852eeca6d3f428d880d82b29754136bcb1\": container with ID starting with 229a4214fa6a6c6f5f30ea0c2a915b852eeca6d3f428d880d82b29754136bcb1 not found: ID does not exist" Nov 24 08:39:30 crc kubenswrapper[4718]: I1124 08:39:30.604377 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="baad1930-cdc6-4c31-b707-b3f35554f711" path="/var/lib/kubelet/pods/baad1930-cdc6-4c31-b707-b3f35554f711/volumes" Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.476732 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" podUID="7bb84203-369a-468c-9b00-c4a5650b88c8" containerName="oauth-openshift" 
containerID="cri-o://52a4928d6c478331a2f1eb3a1fb2c01120a41157e5e232799cf6c463660488f0" gracePeriod=15 Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.818743 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.965570 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-cliconfig\") pod \"7bb84203-369a-468c-9b00-c4a5650b88c8\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.965629 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-audit-policies\") pod \"7bb84203-369a-468c-9b00-c4a5650b88c8\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.965660 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-service-ca\") pod \"7bb84203-369a-468c-9b00-c4a5650b88c8\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.965698 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-ocp-branding-template\") pod \"7bb84203-369a-468c-9b00-c4a5650b88c8\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.965724 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btwkr\" (UniqueName: \"kubernetes.io/projected/7bb84203-369a-468c-9b00-c4a5650b88c8-kube-api-access-btwkr\") pod \"7bb84203-369a-468c-9b00-c4a5650b88c8\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.965745 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-login\") pod \"7bb84203-369a-468c-9b00-c4a5650b88c8\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.965765 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7bb84203-369a-468c-9b00-c4a5650b88c8-audit-dir\") pod \"7bb84203-369a-468c-9b00-c4a5650b88c8\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.965784 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-router-certs\") pod \"7bb84203-369a-468c-9b00-c4a5650b88c8\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.965808 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-idp-0-file-data\") pod \"7bb84203-369a-468c-9b00-c4a5650b88c8\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.965827 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-trusted-ca-bundle\") pod \"7bb84203-369a-468c-9b00-c4a5650b88c8\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.965876 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-error\") pod \"7bb84203-369a-468c-9b00-c4a5650b88c8\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.965898 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-provider-selection\") pod \"7bb84203-369a-468c-9b00-c4a5650b88c8\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.965919 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-session\") pod \"7bb84203-369a-468c-9b00-c4a5650b88c8\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.965943 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-serving-cert\") pod \"7bb84203-369a-468c-9b00-c4a5650b88c8\" (UID: \"7bb84203-369a-468c-9b00-c4a5650b88c8\") " Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.966460 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "7bb84203-369a-468c-9b00-c4a5650b88c8" (UID: "7bb84203-369a-468c-9b00-c4a5650b88c8"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.966473 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "7bb84203-369a-468c-9b00-c4a5650b88c8" (UID: "7bb84203-369a-468c-9b00-c4a5650b88c8"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.966882 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "7bb84203-369a-468c-9b00-c4a5650b88c8" (UID: "7bb84203-369a-468c-9b00-c4a5650b88c8"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.966954 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7bb84203-369a-468c-9b00-c4a5650b88c8-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "7bb84203-369a-468c-9b00-c4a5650b88c8" (UID: "7bb84203-369a-468c-9b00-c4a5650b88c8"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.967080 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "7bb84203-369a-468c-9b00-c4a5650b88c8" (UID: "7bb84203-369a-468c-9b00-c4a5650b88c8"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.972029 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "7bb84203-369a-468c-9b00-c4a5650b88c8" (UID: "7bb84203-369a-468c-9b00-c4a5650b88c8"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.972424 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb84203-369a-468c-9b00-c4a5650b88c8-kube-api-access-btwkr" (OuterVolumeSpecName: "kube-api-access-btwkr") pod "7bb84203-369a-468c-9b00-c4a5650b88c8" (UID: "7bb84203-369a-468c-9b00-c4a5650b88c8"). InnerVolumeSpecName "kube-api-access-btwkr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.972620 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "7bb84203-369a-468c-9b00-c4a5650b88c8" (UID: "7bb84203-369a-468c-9b00-c4a5650b88c8"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.972963 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "7bb84203-369a-468c-9b00-c4a5650b88c8" (UID: "7bb84203-369a-468c-9b00-c4a5650b88c8"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.973214 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "7bb84203-369a-468c-9b00-c4a5650b88c8" (UID: "7bb84203-369a-468c-9b00-c4a5650b88c8"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.973802 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "7bb84203-369a-468c-9b00-c4a5650b88c8" (UID: "7bb84203-369a-468c-9b00-c4a5650b88c8"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.974266 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "7bb84203-369a-468c-9b00-c4a5650b88c8" (UID: "7bb84203-369a-468c-9b00-c4a5650b88c8"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.978289 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "7bb84203-369a-468c-9b00-c4a5650b88c8" (UID: "7bb84203-369a-468c-9b00-c4a5650b88c8"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:39:34 crc kubenswrapper[4718]: I1124 08:39:34.979489 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "7bb84203-369a-468c-9b00-c4a5650b88c8" (UID: "7bb84203-369a-468c-9b00-c4a5650b88c8"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.067284 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.067341 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.067360 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.067375 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.067395 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.067410 4718 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.067425 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.067439 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.067453 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btwkr\" (UniqueName: \"kubernetes.io/projected/7bb84203-369a-468c-9b00-c4a5650b88c8-kube-api-access-btwkr\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.067467 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.067480 4718 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7bb84203-369a-468c-9b00-c4a5650b88c8-audit-dir\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.067499 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-router-certs\") on 
node \"crc\" DevicePath \"\"" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.067516 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.067534 4718 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7bb84203-369a-468c-9b00-c4a5650b88c8-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.362805 4718 generic.go:334] "Generic (PLEG): container finished" podID="7bb84203-369a-468c-9b00-c4a5650b88c8" containerID="52a4928d6c478331a2f1eb3a1fb2c01120a41157e5e232799cf6c463660488f0" exitCode=0 Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.362873 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" event={"ID":"7bb84203-369a-468c-9b00-c4a5650b88c8","Type":"ContainerDied","Data":"52a4928d6c478331a2f1eb3a1fb2c01120a41157e5e232799cf6c463660488f0"} Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.362879 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.362911 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xjcpp" event={"ID":"7bb84203-369a-468c-9b00-c4a5650b88c8","Type":"ContainerDied","Data":"93ddd6dfb68003620fc60128ad76fc99396d182e8fa12b00a2eea855913af132"} Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.362934 4718 scope.go:117] "RemoveContainer" containerID="52a4928d6c478331a2f1eb3a1fb2c01120a41157e5e232799cf6c463660488f0" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.381359 4718 scope.go:117] "RemoveContainer" containerID="52a4928d6c478331a2f1eb3a1fb2c01120a41157e5e232799cf6c463660488f0" Nov 24 08:39:35 crc kubenswrapper[4718]: E1124 08:39:35.381925 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52a4928d6c478331a2f1eb3a1fb2c01120a41157e5e232799cf6c463660488f0\": container with ID starting with 52a4928d6c478331a2f1eb3a1fb2c01120a41157e5e232799cf6c463660488f0 not found: ID does not exist" containerID="52a4928d6c478331a2f1eb3a1fb2c01120a41157e5e232799cf6c463660488f0" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.382020 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52a4928d6c478331a2f1eb3a1fb2c01120a41157e5e232799cf6c463660488f0"} err="failed to get container status \"52a4928d6c478331a2f1eb3a1fb2c01120a41157e5e232799cf6c463660488f0\": rpc error: code = NotFound desc = could not find container \"52a4928d6c478331a2f1eb3a1fb2c01120a41157e5e232799cf6c463660488f0\": container with ID starting with 52a4928d6c478331a2f1eb3a1fb2c01120a41157e5e232799cf6c463660488f0 not found: ID does not exist" Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.397159 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xjcpp"] Nov 24 08:39:35 crc kubenswrapper[4718]: I1124 08:39:35.399617 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xjcpp"] Nov 24 
08:39:36 crc kubenswrapper[4718]: I1124 08:39:36.602644 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb84203-369a-468c-9b00-c4a5650b88c8" path="/var/lib/kubelet/pods/7bb84203-369a-468c-9b00-c4a5650b88c8/volumes" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.888540 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd"] Nov 24 08:39:37 crc kubenswrapper[4718]: E1124 08:39:37.888771 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72a76ec8-c870-4c07-a703-a4ac3e9e97e8" containerName="extract-content" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.888786 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="72a76ec8-c870-4c07-a703-a4ac3e9e97e8" containerName="extract-content" Nov 24 08:39:37 crc kubenswrapper[4718]: E1124 08:39:37.888801 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baad1930-cdc6-4c31-b707-b3f35554f711" containerName="extract-utilities" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.888811 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="baad1930-cdc6-4c31-b707-b3f35554f711" containerName="extract-utilities" Nov 24 08:39:37 crc kubenswrapper[4718]: E1124 08:39:37.888824 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c66ac439-dc6b-46ee-9dd8-1488f7730fa6" containerName="extract-utilities" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.888833 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="c66ac439-dc6b-46ee-9dd8-1488f7730fa6" containerName="extract-utilities" Nov 24 08:39:37 crc kubenswrapper[4718]: E1124 08:39:37.888842 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baad1930-cdc6-4c31-b707-b3f35554f711" containerName="registry-server" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.888850 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="baad1930-cdc6-4c31-b707-b3f35554f711" containerName="registry-server" Nov 24 08:39:37 crc kubenswrapper[4718]: E1124 08:39:37.888864 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72a76ec8-c870-4c07-a703-a4ac3e9e97e8" containerName="registry-server" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.888872 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="72a76ec8-c870-4c07-a703-a4ac3e9e97e8" containerName="registry-server" Nov 24 08:39:37 crc kubenswrapper[4718]: E1124 08:39:37.888880 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bb84203-369a-468c-9b00-c4a5650b88c8" containerName="oauth-openshift" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.888889 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bb84203-369a-468c-9b00-c4a5650b88c8" containerName="oauth-openshift" Nov 24 08:39:37 crc kubenswrapper[4718]: E1124 08:39:37.888902 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72a76ec8-c870-4c07-a703-a4ac3e9e97e8" containerName="extract-utilities" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.888910 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="72a76ec8-c870-4c07-a703-a4ac3e9e97e8" containerName="extract-utilities" Nov 24 08:39:37 crc kubenswrapper[4718]: E1124 08:39:37.888921 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74d63734-18c2-48c4-b472-bea0a1cb43e0" containerName="pruner" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.888929 4718 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="74d63734-18c2-48c4-b472-bea0a1cb43e0" containerName="pruner" Nov 24 08:39:37 crc kubenswrapper[4718]: E1124 08:39:37.888943 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baad1930-cdc6-4c31-b707-b3f35554f711" containerName="extract-content" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.888950 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="baad1930-cdc6-4c31-b707-b3f35554f711" containerName="extract-content" Nov 24 08:39:37 crc kubenswrapper[4718]: E1124 08:39:37.888961 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c66ac439-dc6b-46ee-9dd8-1488f7730fa6" containerName="registry-server" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.888987 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="c66ac439-dc6b-46ee-9dd8-1488f7730fa6" containerName="registry-server" Nov 24 08:39:37 crc kubenswrapper[4718]: E1124 08:39:37.889001 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4eeb5aa-31cb-4c3e-8045-8132ac10b348" containerName="extract-utilities" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.889011 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4eeb5aa-31cb-4c3e-8045-8132ac10b348" containerName="extract-utilities" Nov 24 08:39:37 crc kubenswrapper[4718]: E1124 08:39:37.889028 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4eeb5aa-31cb-4c3e-8045-8132ac10b348" containerName="extract-content" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.889035 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4eeb5aa-31cb-4c3e-8045-8132ac10b348" containerName="extract-content" Nov 24 08:39:37 crc kubenswrapper[4718]: E1124 08:39:37.889046 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4eeb5aa-31cb-4c3e-8045-8132ac10b348" containerName="registry-server" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.889054 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4eeb5aa-31cb-4c3e-8045-8132ac10b348" containerName="registry-server" Nov 24 08:39:37 crc kubenswrapper[4718]: E1124 08:39:37.889067 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c66ac439-dc6b-46ee-9dd8-1488f7730fa6" containerName="extract-content" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.889074 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="c66ac439-dc6b-46ee-9dd8-1488f7730fa6" containerName="extract-content" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.889186 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="c66ac439-dc6b-46ee-9dd8-1488f7730fa6" containerName="registry-server" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.889205 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="74d63734-18c2-48c4-b472-bea0a1cb43e0" containerName="pruner" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.889218 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bb84203-369a-468c-9b00-c4a5650b88c8" containerName="oauth-openshift" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.889229 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4eeb5aa-31cb-4c3e-8045-8132ac10b348" containerName="registry-server" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.889240 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="baad1930-cdc6-4c31-b707-b3f35554f711" containerName="registry-server" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.889253 4718 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="72a76ec8-c870-4c07-a703-a4ac3e9e97e8" containerName="registry-server" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.889732 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.893402 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.893439 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.893713 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.893822 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.893986 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.894138 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.894604 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.900923 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.901482 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.901630 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.901790 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.901872 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.904318 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.904364 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd"] Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.907658 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.919877 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.944212 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" 
(UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-user-template-login\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.944279 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/11606c3f-869f-461e-a354-ff07ac475ba8-audit-dir\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.944374 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-session\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.944396 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.944432 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-router-certs\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.944478 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-user-template-error\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.944498 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-service-ca\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.944523 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.944551 4718 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.944572 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvtkd\" (UniqueName: \"kubernetes.io/projected/11606c3f-869f-461e-a354-ff07ac475ba8-kube-api-access-mvtkd\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.944596 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/11606c3f-869f-461e-a354-ff07ac475ba8-audit-policies\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.944620 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.944656 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:37 crc kubenswrapper[4718]: I1124 08:39:37.944789 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.046079 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-router-certs\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.046160 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-user-template-error\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " 
pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.046191 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-service-ca\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.046223 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.046247 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.046274 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvtkd\" (UniqueName: \"kubernetes.io/projected/11606c3f-869f-461e-a354-ff07ac475ba8-kube-api-access-mvtkd\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.046314 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/11606c3f-869f-461e-a354-ff07ac475ba8-audit-policies\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.046353 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.046400 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.046429 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " 
pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.046459 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-user-template-login\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.046489 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/11606c3f-869f-461e-a354-ff07ac475ba8-audit-dir\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.046518 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-session\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.046543 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.047632 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-service-ca\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.047769 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.048003 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/11606c3f-869f-461e-a354-ff07ac475ba8-audit-dir\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.048094 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/11606c3f-869f-461e-a354-ff07ac475ba8-audit-policies\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.048120 4718 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.052071 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-user-template-error\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.052076 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-router-certs\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.052620 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.053143 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-session\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.053254 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-user-template-login\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.053550 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.053585 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.054288 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/11606c3f-869f-461e-a354-ff07ac475ba8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.063867 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvtkd\" (UniqueName: \"kubernetes.io/projected/11606c3f-869f-461e-a354-ff07ac475ba8-kube-api-access-mvtkd\") pod \"oauth-openshift-679cb4ddc5-zzpjd\" (UID: \"11606c3f-869f-461e-a354-ff07ac475ba8\") " pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.227714 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.450996 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd"] Nov 24 08:39:38 crc kubenswrapper[4718]: I1124 08:39:38.549753 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" event={"ID":"11606c3f-869f-461e-a354-ff07ac475ba8","Type":"ContainerStarted","Data":"8c09b95143c9e86480da423ab1710cbe64d384d9017968a00f128558f25e6a1e"} Nov 24 08:39:39 crc kubenswrapper[4718]: I1124 08:39:39.558697 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" event={"ID":"11606c3f-869f-461e-a354-ff07ac475ba8","Type":"ContainerStarted","Data":"3a6d3a6cdb39e873cae724331231ccfbfe6ac980720895ddbb3a2ead2bf03cb9"} Nov 24 08:39:39 crc kubenswrapper[4718]: I1124 08:39:39.559332 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:39 crc kubenswrapper[4718]: I1124 08:39:39.565449 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" Nov 24 08:39:39 crc kubenswrapper[4718]: I1124 08:39:39.602927 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-679cb4ddc5-zzpjd" podStartSLOduration=30.602906407 podStartE2EDuration="30.602906407s" podCreationTimestamp="2025-11-24 08:39:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:39:39.585378344 +0000 UTC m=+251.701669248" watchObservedRunningTime="2025-11-24 08:39:39.602906407 +0000 UTC m=+251.719197311" Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.626258 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qcdbm"] Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.627072 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qcdbm" podUID="0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16" containerName="registry-server" containerID="cri-o://8c9ccfa7d475d090583a91d4871f3dfecdaa41f6b6eccb1193c8dbaaa34d2bf1" gracePeriod=30 Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.636866 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9zgmp"] Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 
08:39:51.637375 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9zgmp" podUID="44264beb-2b14-47a9-9da4-18ca5e19d282" containerName="registry-server" containerID="cri-o://66436d056290897fb2eec3859e284233c54d45823fc35ac04bf5781c559ed14f" gracePeriod=30 Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.642920 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z4pwz"] Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.643189 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" podUID="d037ebb5-19ab-471e-b627-3b0487dfa12c" containerName="marketplace-operator" containerID="cri-o://1d44db86d76946d29d4aacddf78eb37ec641c041219ce4a97e55e4621d050999" gracePeriod=30 Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.658879 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w8rfg"] Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.659187 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-w8rfg" podUID="39df5500-d2c6-4c61-be65-cc5598f8201d" containerName="registry-server" containerID="cri-o://df9fec50a376b40d04ce584c85cd9480a0dc77d378793ce49686185e5b9af75d" gracePeriod=30 Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.665535 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zqgzd"] Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.666246 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-zqgzd" Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.680276 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zpbbq"] Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.680553 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zpbbq" podUID="3f4a042f-effa-4f5a-ac2d-4d378b0f15a5" containerName="registry-server" containerID="cri-o://5c76ce6432a5bb91e4314caffe626fb0319a70d68b99387a1f95fde0e60321b1" gracePeriod=30 Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.689121 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zqgzd"] Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.719638 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmkqg\" (UniqueName: \"kubernetes.io/projected/5fb52afb-b455-44d0-ad14-36a8f2790af2-kube-api-access-gmkqg\") pod \"marketplace-operator-79b997595-zqgzd\" (UID: \"5fb52afb-b455-44d0-ad14-36a8f2790af2\") " pod="openshift-marketplace/marketplace-operator-79b997595-zqgzd" Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.719702 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5fb52afb-b455-44d0-ad14-36a8f2790af2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-zqgzd\" (UID: \"5fb52afb-b455-44d0-ad14-36a8f2790af2\") " pod="openshift-marketplace/marketplace-operator-79b997595-zqgzd" Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.719728 4718 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5fb52afb-b455-44d0-ad14-36a8f2790af2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-zqgzd\" (UID: \"5fb52afb-b455-44d0-ad14-36a8f2790af2\") " pod="openshift-marketplace/marketplace-operator-79b997595-zqgzd" Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.820567 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmkqg\" (UniqueName: \"kubernetes.io/projected/5fb52afb-b455-44d0-ad14-36a8f2790af2-kube-api-access-gmkqg\") pod \"marketplace-operator-79b997595-zqgzd\" (UID: \"5fb52afb-b455-44d0-ad14-36a8f2790af2\") " pod="openshift-marketplace/marketplace-operator-79b997595-zqgzd" Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.820628 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5fb52afb-b455-44d0-ad14-36a8f2790af2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-zqgzd\" (UID: \"5fb52afb-b455-44d0-ad14-36a8f2790af2\") " pod="openshift-marketplace/marketplace-operator-79b997595-zqgzd" Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.820657 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5fb52afb-b455-44d0-ad14-36a8f2790af2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-zqgzd\" (UID: \"5fb52afb-b455-44d0-ad14-36a8f2790af2\") " pod="openshift-marketplace/marketplace-operator-79b997595-zqgzd" Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.822283 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5fb52afb-b455-44d0-ad14-36a8f2790af2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-zqgzd\" (UID: \"5fb52afb-b455-44d0-ad14-36a8f2790af2\") " pod="openshift-marketplace/marketplace-operator-79b997595-zqgzd" Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.826108 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5fb52afb-b455-44d0-ad14-36a8f2790af2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-zqgzd\" (UID: \"5fb52afb-b455-44d0-ad14-36a8f2790af2\") " pod="openshift-marketplace/marketplace-operator-79b997595-zqgzd" Nov 24 08:39:51 crc kubenswrapper[4718]: I1124 08:39:51.838556 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmkqg\" (UniqueName: \"kubernetes.io/projected/5fb52afb-b455-44d0-ad14-36a8f2790af2-kube-api-access-gmkqg\") pod \"marketplace-operator-79b997595-zqgzd\" (UID: \"5fb52afb-b455-44d0-ad14-36a8f2790af2\") " pod="openshift-marketplace/marketplace-operator-79b997595-zqgzd" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.037885 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-zqgzd" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.049307 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qcdbm" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.051778 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w8rfg" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.057750 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.094359 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zpbbq" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.125210 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-494qt\" (UniqueName: \"kubernetes.io/projected/d037ebb5-19ab-471e-b627-3b0487dfa12c-kube-api-access-494qt\") pod \"d037ebb5-19ab-471e-b627-3b0487dfa12c\" (UID: \"d037ebb5-19ab-471e-b627-3b0487dfa12c\") " Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.125336 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-operator-metrics\") pod \"d037ebb5-19ab-471e-b627-3b0487dfa12c\" (UID: \"d037ebb5-19ab-471e-b627-3b0487dfa12c\") " Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.125429 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnll9\" (UniqueName: \"kubernetes.io/projected/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-kube-api-access-nnll9\") pod \"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16\" (UID: \"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16\") " Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.125463 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-664lq\" (UniqueName: \"kubernetes.io/projected/39df5500-d2c6-4c61-be65-cc5598f8201d-kube-api-access-664lq\") pod \"39df5500-d2c6-4c61-be65-cc5598f8201d\" (UID: \"39df5500-d2c6-4c61-be65-cc5598f8201d\") " Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.125524 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-catalog-content\") pod \"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5\" (UID: \"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5\") " Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.125572 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39df5500-d2c6-4c61-be65-cc5598f8201d-catalog-content\") pod \"39df5500-d2c6-4c61-be65-cc5598f8201d\" (UID: \"39df5500-d2c6-4c61-be65-cc5598f8201d\") " Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.129889 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-catalog-content\") pod \"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16\" (UID: \"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16\") " Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.129998 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39df5500-d2c6-4c61-be65-cc5598f8201d-utilities\") pod \"39df5500-d2c6-4c61-be65-cc5598f8201d\" (UID: \"39df5500-d2c6-4c61-be65-cc5598f8201d\") " Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.130035 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-utilities\") pod \"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16\" (UID: \"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16\") " Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.130079 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-trusted-ca\") pod \"d037ebb5-19ab-471e-b627-3b0487dfa12c\" (UID: \"d037ebb5-19ab-471e-b627-3b0487dfa12c\") " Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.130162 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-kube-api-access-nnll9" (OuterVolumeSpecName: "kube-api-access-nnll9") pod "0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16" (UID: "0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16"). InnerVolumeSpecName "kube-api-access-nnll9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.131142 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39df5500-d2c6-4c61-be65-cc5598f8201d-utilities" (OuterVolumeSpecName: "utilities") pod "39df5500-d2c6-4c61-be65-cc5598f8201d" (UID: "39df5500-d2c6-4c61-be65-cc5598f8201d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.131426 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-utilities" (OuterVolumeSpecName: "utilities") pod "0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16" (UID: "0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.132090 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39df5500-d2c6-4c61-be65-cc5598f8201d-kube-api-access-664lq" (OuterVolumeSpecName: "kube-api-access-664lq") pod "39df5500-d2c6-4c61-be65-cc5598f8201d" (UID: "39df5500-d2c6-4c61-be65-cc5598f8201d"). InnerVolumeSpecName "kube-api-access-664lq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.133469 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "d037ebb5-19ab-471e-b627-3b0487dfa12c" (UID: "d037ebb5-19ab-471e-b627-3b0487dfa12c"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.133511 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-kube-api-access-jklfx" (OuterVolumeSpecName: "kube-api-access-jklfx") pod "3f4a042f-effa-4f5a-ac2d-4d378b0f15a5" (UID: "3f4a042f-effa-4f5a-ac2d-4d378b0f15a5"). InnerVolumeSpecName "kube-api-access-jklfx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.135043 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jklfx\" (UniqueName: \"kubernetes.io/projected/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-kube-api-access-jklfx\") pod \"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5\" (UID: \"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5\") " Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.135114 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-utilities\") pod \"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5\" (UID: \"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5\") " Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.136051 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39df5500-d2c6-4c61-be65-cc5598f8201d-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.136089 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.136105 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jklfx\" (UniqueName: \"kubernetes.io/projected/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-kube-api-access-jklfx\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.136120 4718 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.136134 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnll9\" (UniqueName: \"kubernetes.io/projected/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-kube-api-access-nnll9\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.136428 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-utilities" (OuterVolumeSpecName: "utilities") pod "3f4a042f-effa-4f5a-ac2d-4d378b0f15a5" (UID: "3f4a042f-effa-4f5a-ac2d-4d378b0f15a5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.140229 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "d037ebb5-19ab-471e-b627-3b0487dfa12c" (UID: "d037ebb5-19ab-471e-b627-3b0487dfa12c"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.142284 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d037ebb5-19ab-471e-b627-3b0487dfa12c-kube-api-access-494qt" (OuterVolumeSpecName: "kube-api-access-494qt") pod "d037ebb5-19ab-471e-b627-3b0487dfa12c" (UID: "d037ebb5-19ab-471e-b627-3b0487dfa12c"). InnerVolumeSpecName "kube-api-access-494qt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.146668 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9zgmp" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.149889 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39df5500-d2c6-4c61-be65-cc5598f8201d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "39df5500-d2c6-4c61-be65-cc5598f8201d" (UID: "39df5500-d2c6-4c61-be65-cc5598f8201d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.184934 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16" (UID: "0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.235371 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3f4a042f-effa-4f5a-ac2d-4d378b0f15a5" (UID: "3f4a042f-effa-4f5a-ac2d-4d378b0f15a5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.236996 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwl8p\" (UniqueName: \"kubernetes.io/projected/44264beb-2b14-47a9-9da4-18ca5e19d282-kube-api-access-lwl8p\") pod \"44264beb-2b14-47a9-9da4-18ca5e19d282\" (UID: \"44264beb-2b14-47a9-9da4-18ca5e19d282\") " Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.237053 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44264beb-2b14-47a9-9da4-18ca5e19d282-utilities\") pod \"44264beb-2b14-47a9-9da4-18ca5e19d282\" (UID: \"44264beb-2b14-47a9-9da4-18ca5e19d282\") " Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.237086 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44264beb-2b14-47a9-9da4-18ca5e19d282-catalog-content\") pod \"44264beb-2b14-47a9-9da4-18ca5e19d282\" (UID: \"44264beb-2b14-47a9-9da4-18ca5e19d282\") " Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.237306 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-494qt\" (UniqueName: \"kubernetes.io/projected/d037ebb5-19ab-471e-b627-3b0487dfa12c-kube-api-access-494qt\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.237325 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-664lq\" (UniqueName: \"kubernetes.io/projected/39df5500-d2c6-4c61-be65-cc5598f8201d-kube-api-access-664lq\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.237334 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.237343 
4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39df5500-d2c6-4c61-be65-cc5598f8201d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.237352 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.237361 4718 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d037ebb5-19ab-471e-b627-3b0487dfa12c-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.237371 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.238749 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44264beb-2b14-47a9-9da4-18ca5e19d282-utilities" (OuterVolumeSpecName: "utilities") pod "44264beb-2b14-47a9-9da4-18ca5e19d282" (UID: "44264beb-2b14-47a9-9da4-18ca5e19d282"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.240117 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44264beb-2b14-47a9-9da4-18ca5e19d282-kube-api-access-lwl8p" (OuterVolumeSpecName: "kube-api-access-lwl8p") pod "44264beb-2b14-47a9-9da4-18ca5e19d282" (UID: "44264beb-2b14-47a9-9da4-18ca5e19d282"). InnerVolumeSpecName "kube-api-access-lwl8p". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.296755 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44264beb-2b14-47a9-9da4-18ca5e19d282-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "44264beb-2b14-47a9-9da4-18ca5e19d282" (UID: "44264beb-2b14-47a9-9da4-18ca5e19d282"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.338432 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwl8p\" (UniqueName: \"kubernetes.io/projected/44264beb-2b14-47a9-9da4-18ca5e19d282-kube-api-access-lwl8p\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.338465 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44264beb-2b14-47a9-9da4-18ca5e19d282-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.338476 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44264beb-2b14-47a9-9da4-18ca5e19d282-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.482498 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zqgzd"] Nov 24 08:39:52 crc kubenswrapper[4718]: W1124 08:39:52.489003 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fb52afb_b455_44d0_ad14_36a8f2790af2.slice/crio-d956110ac19feeaed7b0682d72a450b5bdf52b38828f879510db9fbcaa84e282 WatchSource:0}: Error finding container d956110ac19feeaed7b0682d72a450b5bdf52b38828f879510db9fbcaa84e282: Status 404 returned error can't find the container with id d956110ac19feeaed7b0682d72a450b5bdf52b38828f879510db9fbcaa84e282 Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.634319 4718 generic.go:334] "Generic (PLEG): container finished" podID="d037ebb5-19ab-471e-b627-3b0487dfa12c" containerID="1d44db86d76946d29d4aacddf78eb37ec641c041219ce4a97e55e4621d050999" exitCode=0 Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.634439 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.634458 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" event={"ID":"d037ebb5-19ab-471e-b627-3b0487dfa12c","Type":"ContainerDied","Data":"1d44db86d76946d29d4aacddf78eb37ec641c041219ce4a97e55e4621d050999"} Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.634531 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-z4pwz" event={"ID":"d037ebb5-19ab-471e-b627-3b0487dfa12c","Type":"ContainerDied","Data":"1b3868e6a7d206d533d01d2a3bde1aefb24ff9fe482b014eb13c44ede23e8d50"} Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.634551 4718 scope.go:117] "RemoveContainer" containerID="1d44db86d76946d29d4aacddf78eb37ec641c041219ce4a97e55e4621d050999" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.636745 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-zqgzd" event={"ID":"5fb52afb-b455-44d0-ad14-36a8f2790af2","Type":"ContainerStarted","Data":"ca8d01b63026ffce96cb001680cf7ea4539bf60dde0f443bd7ffad21272acf40"} Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.636790 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-zqgzd" event={"ID":"5fb52afb-b455-44d0-ad14-36a8f2790af2","Type":"ContainerStarted","Data":"d956110ac19feeaed7b0682d72a450b5bdf52b38828f879510db9fbcaa84e282"} Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.636977 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-zqgzd" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.638624 4718 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-zqgzd container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" start-of-body= Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.638666 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-zqgzd" podUID="5fb52afb-b455-44d0-ad14-36a8f2790af2" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.639236 4718 generic.go:334] "Generic (PLEG): container finished" podID="39df5500-d2c6-4c61-be65-cc5598f8201d" containerID="df9fec50a376b40d04ce584c85cd9480a0dc77d378793ce49686185e5b9af75d" exitCode=0 Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.639289 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w8rfg" event={"ID":"39df5500-d2c6-4c61-be65-cc5598f8201d","Type":"ContainerDied","Data":"df9fec50a376b40d04ce584c85cd9480a0dc77d378793ce49686185e5b9af75d"} Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.639314 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w8rfg" event={"ID":"39df5500-d2c6-4c61-be65-cc5598f8201d","Type":"ContainerDied","Data":"fb12512064c63d06fa696e87f97a2ee8086ee56e9cbb9e0fd0fe12521d5b059d"} Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.639389 4718 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w8rfg" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.647940 4718 generic.go:334] "Generic (PLEG): container finished" podID="44264beb-2b14-47a9-9da4-18ca5e19d282" containerID="66436d056290897fb2eec3859e284233c54d45823fc35ac04bf5781c559ed14f" exitCode=0 Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.648039 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9zgmp" event={"ID":"44264beb-2b14-47a9-9da4-18ca5e19d282","Type":"ContainerDied","Data":"66436d056290897fb2eec3859e284233c54d45823fc35ac04bf5781c559ed14f"} Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.648071 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9zgmp" event={"ID":"44264beb-2b14-47a9-9da4-18ca5e19d282","Type":"ContainerDied","Data":"58d26c92cdf7f82553653a6a4eb1fe0959db34d53913e8a0fab0b98c3c852557"} Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.648134 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9zgmp" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.658663 4718 generic.go:334] "Generic (PLEG): container finished" podID="0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16" containerID="8c9ccfa7d475d090583a91d4871f3dfecdaa41f6b6eccb1193c8dbaaa34d2bf1" exitCode=0 Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.658723 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qcdbm" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.658753 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qcdbm" event={"ID":"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16","Type":"ContainerDied","Data":"8c9ccfa7d475d090583a91d4871f3dfecdaa41f6b6eccb1193c8dbaaa34d2bf1"} Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.658776 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qcdbm" event={"ID":"0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16","Type":"ContainerDied","Data":"0a0e4c088cb8cf10292ce97ee87f3788833781459507b28b8f66775c268b8758"} Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.658801 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-zqgzd" podStartSLOduration=1.658768211 podStartE2EDuration="1.658768211s" podCreationTimestamp="2025-11-24 08:39:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:39:52.654503497 +0000 UTC m=+264.770794401" watchObservedRunningTime="2025-11-24 08:39:52.658768211 +0000 UTC m=+264.775059115" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.661923 4718 generic.go:334] "Generic (PLEG): container finished" podID="3f4a042f-effa-4f5a-ac2d-4d378b0f15a5" containerID="5c76ce6432a5bb91e4314caffe626fb0319a70d68b99387a1f95fde0e60321b1" exitCode=0 Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.661992 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zpbbq" event={"ID":"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5","Type":"ContainerDied","Data":"5c76ce6432a5bb91e4314caffe626fb0319a70d68b99387a1f95fde0e60321b1"} Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.662021 4718 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/redhat-operators-zpbbq" event={"ID":"3f4a042f-effa-4f5a-ac2d-4d378b0f15a5","Type":"ContainerDied","Data":"d46d7e368fac22d40f6dc30b541ba34fa08db64dbde864fffad45335dcf5c8ff"} Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.662243 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zpbbq" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.664198 4718 scope.go:117] "RemoveContainer" containerID="1d44db86d76946d29d4aacddf78eb37ec641c041219ce4a97e55e4621d050999" Nov 24 08:39:52 crc kubenswrapper[4718]: E1124 08:39:52.664544 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d44db86d76946d29d4aacddf78eb37ec641c041219ce4a97e55e4621d050999\": container with ID starting with 1d44db86d76946d29d4aacddf78eb37ec641c041219ce4a97e55e4621d050999 not found: ID does not exist" containerID="1d44db86d76946d29d4aacddf78eb37ec641c041219ce4a97e55e4621d050999" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.664569 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d44db86d76946d29d4aacddf78eb37ec641c041219ce4a97e55e4621d050999"} err="failed to get container status \"1d44db86d76946d29d4aacddf78eb37ec641c041219ce4a97e55e4621d050999\": rpc error: code = NotFound desc = could not find container \"1d44db86d76946d29d4aacddf78eb37ec641c041219ce4a97e55e4621d050999\": container with ID starting with 1d44db86d76946d29d4aacddf78eb37ec641c041219ce4a97e55e4621d050999 not found: ID does not exist" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.664597 4718 scope.go:117] "RemoveContainer" containerID="df9fec50a376b40d04ce584c85cd9480a0dc77d378793ce49686185e5b9af75d" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.676314 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w8rfg"] Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.684217 4718 scope.go:117] "RemoveContainer" containerID="d447947dd9bedfe752e905efc1fbbc4e59f644c1f781e1ccbf52c6e10ff7a59f" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.684714 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-w8rfg"] Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.686883 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9zgmp"] Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.694458 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9zgmp"] Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.696386 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z4pwz"] Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.698675 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z4pwz"] Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.707772 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zpbbq"] Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.708961 4718 scope.go:117] "RemoveContainer" containerID="59d09d611f0cdc61e1aeeb9135ad9a73e4ec19be072acc457e95a9b485940763" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.714287 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-marketplace/redhat-operators-zpbbq"] Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.723761 4718 scope.go:117] "RemoveContainer" containerID="df9fec50a376b40d04ce584c85cd9480a0dc77d378793ce49686185e5b9af75d" Nov 24 08:39:52 crc kubenswrapper[4718]: E1124 08:39:52.724411 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df9fec50a376b40d04ce584c85cd9480a0dc77d378793ce49686185e5b9af75d\": container with ID starting with df9fec50a376b40d04ce584c85cd9480a0dc77d378793ce49686185e5b9af75d not found: ID does not exist" containerID="df9fec50a376b40d04ce584c85cd9480a0dc77d378793ce49686185e5b9af75d" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.724459 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df9fec50a376b40d04ce584c85cd9480a0dc77d378793ce49686185e5b9af75d"} err="failed to get container status \"df9fec50a376b40d04ce584c85cd9480a0dc77d378793ce49686185e5b9af75d\": rpc error: code = NotFound desc = could not find container \"df9fec50a376b40d04ce584c85cd9480a0dc77d378793ce49686185e5b9af75d\": container with ID starting with df9fec50a376b40d04ce584c85cd9480a0dc77d378793ce49686185e5b9af75d not found: ID does not exist" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.724492 4718 scope.go:117] "RemoveContainer" containerID="d447947dd9bedfe752e905efc1fbbc4e59f644c1f781e1ccbf52c6e10ff7a59f" Nov 24 08:39:52 crc kubenswrapper[4718]: E1124 08:39:52.724931 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d447947dd9bedfe752e905efc1fbbc4e59f644c1f781e1ccbf52c6e10ff7a59f\": container with ID starting with d447947dd9bedfe752e905efc1fbbc4e59f644c1f781e1ccbf52c6e10ff7a59f not found: ID does not exist" containerID="d447947dd9bedfe752e905efc1fbbc4e59f644c1f781e1ccbf52c6e10ff7a59f" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.724988 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d447947dd9bedfe752e905efc1fbbc4e59f644c1f781e1ccbf52c6e10ff7a59f"} err="failed to get container status \"d447947dd9bedfe752e905efc1fbbc4e59f644c1f781e1ccbf52c6e10ff7a59f\": rpc error: code = NotFound desc = could not find container \"d447947dd9bedfe752e905efc1fbbc4e59f644c1f781e1ccbf52c6e10ff7a59f\": container with ID starting with d447947dd9bedfe752e905efc1fbbc4e59f644c1f781e1ccbf52c6e10ff7a59f not found: ID does not exist" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.725019 4718 scope.go:117] "RemoveContainer" containerID="59d09d611f0cdc61e1aeeb9135ad9a73e4ec19be072acc457e95a9b485940763" Nov 24 08:39:52 crc kubenswrapper[4718]: E1124 08:39:52.725315 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59d09d611f0cdc61e1aeeb9135ad9a73e4ec19be072acc457e95a9b485940763\": container with ID starting with 59d09d611f0cdc61e1aeeb9135ad9a73e4ec19be072acc457e95a9b485940763 not found: ID does not exist" containerID="59d09d611f0cdc61e1aeeb9135ad9a73e4ec19be072acc457e95a9b485940763" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.725355 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59d09d611f0cdc61e1aeeb9135ad9a73e4ec19be072acc457e95a9b485940763"} err="failed to get container status \"59d09d611f0cdc61e1aeeb9135ad9a73e4ec19be072acc457e95a9b485940763\": rpc error: code = NotFound desc = could not 
find container \"59d09d611f0cdc61e1aeeb9135ad9a73e4ec19be072acc457e95a9b485940763\": container with ID starting with 59d09d611f0cdc61e1aeeb9135ad9a73e4ec19be072acc457e95a9b485940763 not found: ID does not exist" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.725387 4718 scope.go:117] "RemoveContainer" containerID="66436d056290897fb2eec3859e284233c54d45823fc35ac04bf5781c559ed14f" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.727600 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qcdbm"] Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.733550 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qcdbm"] Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.744326 4718 scope.go:117] "RemoveContainer" containerID="58fc23eba20db52c1c741efcee838beef24cfaeea7e065704fb98e89aa81dc15" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.757762 4718 scope.go:117] "RemoveContainer" containerID="70ed657792d4e97b28b5606e313fb58e64d38e6cc896aa48dfd46efc9c5978df" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.772711 4718 scope.go:117] "RemoveContainer" containerID="66436d056290897fb2eec3859e284233c54d45823fc35ac04bf5781c559ed14f" Nov 24 08:39:52 crc kubenswrapper[4718]: E1124 08:39:52.773280 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66436d056290897fb2eec3859e284233c54d45823fc35ac04bf5781c559ed14f\": container with ID starting with 66436d056290897fb2eec3859e284233c54d45823fc35ac04bf5781c559ed14f not found: ID does not exist" containerID="66436d056290897fb2eec3859e284233c54d45823fc35ac04bf5781c559ed14f" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.773317 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66436d056290897fb2eec3859e284233c54d45823fc35ac04bf5781c559ed14f"} err="failed to get container status \"66436d056290897fb2eec3859e284233c54d45823fc35ac04bf5781c559ed14f\": rpc error: code = NotFound desc = could not find container \"66436d056290897fb2eec3859e284233c54d45823fc35ac04bf5781c559ed14f\": container with ID starting with 66436d056290897fb2eec3859e284233c54d45823fc35ac04bf5781c559ed14f not found: ID does not exist" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.773348 4718 scope.go:117] "RemoveContainer" containerID="58fc23eba20db52c1c741efcee838beef24cfaeea7e065704fb98e89aa81dc15" Nov 24 08:39:52 crc kubenswrapper[4718]: E1124 08:39:52.773654 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58fc23eba20db52c1c741efcee838beef24cfaeea7e065704fb98e89aa81dc15\": container with ID starting with 58fc23eba20db52c1c741efcee838beef24cfaeea7e065704fb98e89aa81dc15 not found: ID does not exist" containerID="58fc23eba20db52c1c741efcee838beef24cfaeea7e065704fb98e89aa81dc15" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.773687 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58fc23eba20db52c1c741efcee838beef24cfaeea7e065704fb98e89aa81dc15"} err="failed to get container status \"58fc23eba20db52c1c741efcee838beef24cfaeea7e065704fb98e89aa81dc15\": rpc error: code = NotFound desc = could not find container \"58fc23eba20db52c1c741efcee838beef24cfaeea7e065704fb98e89aa81dc15\": container with ID starting with 58fc23eba20db52c1c741efcee838beef24cfaeea7e065704fb98e89aa81dc15 not found: ID 
does not exist" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.773713 4718 scope.go:117] "RemoveContainer" containerID="70ed657792d4e97b28b5606e313fb58e64d38e6cc896aa48dfd46efc9c5978df" Nov 24 08:39:52 crc kubenswrapper[4718]: E1124 08:39:52.774713 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70ed657792d4e97b28b5606e313fb58e64d38e6cc896aa48dfd46efc9c5978df\": container with ID starting with 70ed657792d4e97b28b5606e313fb58e64d38e6cc896aa48dfd46efc9c5978df not found: ID does not exist" containerID="70ed657792d4e97b28b5606e313fb58e64d38e6cc896aa48dfd46efc9c5978df" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.774741 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70ed657792d4e97b28b5606e313fb58e64d38e6cc896aa48dfd46efc9c5978df"} err="failed to get container status \"70ed657792d4e97b28b5606e313fb58e64d38e6cc896aa48dfd46efc9c5978df\": rpc error: code = NotFound desc = could not find container \"70ed657792d4e97b28b5606e313fb58e64d38e6cc896aa48dfd46efc9c5978df\": container with ID starting with 70ed657792d4e97b28b5606e313fb58e64d38e6cc896aa48dfd46efc9c5978df not found: ID does not exist" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.774763 4718 scope.go:117] "RemoveContainer" containerID="8c9ccfa7d475d090583a91d4871f3dfecdaa41f6b6eccb1193c8dbaaa34d2bf1" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.813546 4718 scope.go:117] "RemoveContainer" containerID="d97d600f47a7452b23da1602adbb3845dad0b7e3477101c2fd89dc66e5ab6f16" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.831776 4718 scope.go:117] "RemoveContainer" containerID="3f73af9d5d4a5ed1e40e305540bca354d03cf203c9d548c4a06d724c38111dd9" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.845189 4718 scope.go:117] "RemoveContainer" containerID="8c9ccfa7d475d090583a91d4871f3dfecdaa41f6b6eccb1193c8dbaaa34d2bf1" Nov 24 08:39:52 crc kubenswrapper[4718]: E1124 08:39:52.845604 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c9ccfa7d475d090583a91d4871f3dfecdaa41f6b6eccb1193c8dbaaa34d2bf1\": container with ID starting with 8c9ccfa7d475d090583a91d4871f3dfecdaa41f6b6eccb1193c8dbaaa34d2bf1 not found: ID does not exist" containerID="8c9ccfa7d475d090583a91d4871f3dfecdaa41f6b6eccb1193c8dbaaa34d2bf1" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.845715 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c9ccfa7d475d090583a91d4871f3dfecdaa41f6b6eccb1193c8dbaaa34d2bf1"} err="failed to get container status \"8c9ccfa7d475d090583a91d4871f3dfecdaa41f6b6eccb1193c8dbaaa34d2bf1\": rpc error: code = NotFound desc = could not find container \"8c9ccfa7d475d090583a91d4871f3dfecdaa41f6b6eccb1193c8dbaaa34d2bf1\": container with ID starting with 8c9ccfa7d475d090583a91d4871f3dfecdaa41f6b6eccb1193c8dbaaa34d2bf1 not found: ID does not exist" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.845804 4718 scope.go:117] "RemoveContainer" containerID="d97d600f47a7452b23da1602adbb3845dad0b7e3477101c2fd89dc66e5ab6f16" Nov 24 08:39:52 crc kubenswrapper[4718]: E1124 08:39:52.846485 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d97d600f47a7452b23da1602adbb3845dad0b7e3477101c2fd89dc66e5ab6f16\": container with ID starting with 
d97d600f47a7452b23da1602adbb3845dad0b7e3477101c2fd89dc66e5ab6f16 not found: ID does not exist" containerID="d97d600f47a7452b23da1602adbb3845dad0b7e3477101c2fd89dc66e5ab6f16" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.846586 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d97d600f47a7452b23da1602adbb3845dad0b7e3477101c2fd89dc66e5ab6f16"} err="failed to get container status \"d97d600f47a7452b23da1602adbb3845dad0b7e3477101c2fd89dc66e5ab6f16\": rpc error: code = NotFound desc = could not find container \"d97d600f47a7452b23da1602adbb3845dad0b7e3477101c2fd89dc66e5ab6f16\": container with ID starting with d97d600f47a7452b23da1602adbb3845dad0b7e3477101c2fd89dc66e5ab6f16 not found: ID does not exist" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.846678 4718 scope.go:117] "RemoveContainer" containerID="3f73af9d5d4a5ed1e40e305540bca354d03cf203c9d548c4a06d724c38111dd9" Nov 24 08:39:52 crc kubenswrapper[4718]: E1124 08:39:52.847454 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f73af9d5d4a5ed1e40e305540bca354d03cf203c9d548c4a06d724c38111dd9\": container with ID starting with 3f73af9d5d4a5ed1e40e305540bca354d03cf203c9d548c4a06d724c38111dd9 not found: ID does not exist" containerID="3f73af9d5d4a5ed1e40e305540bca354d03cf203c9d548c4a06d724c38111dd9" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.847489 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f73af9d5d4a5ed1e40e305540bca354d03cf203c9d548c4a06d724c38111dd9"} err="failed to get container status \"3f73af9d5d4a5ed1e40e305540bca354d03cf203c9d548c4a06d724c38111dd9\": rpc error: code = NotFound desc = could not find container \"3f73af9d5d4a5ed1e40e305540bca354d03cf203c9d548c4a06d724c38111dd9\": container with ID starting with 3f73af9d5d4a5ed1e40e305540bca354d03cf203c9d548c4a06d724c38111dd9 not found: ID does not exist" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.847513 4718 scope.go:117] "RemoveContainer" containerID="5c76ce6432a5bb91e4314caffe626fb0319a70d68b99387a1f95fde0e60321b1" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.863376 4718 scope.go:117] "RemoveContainer" containerID="c95977bb249805c70a7e91414b7d8b00d41ae009536656e2a728acfb33127017" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.875702 4718 scope.go:117] "RemoveContainer" containerID="8947bec93eb0601876a82be203112baad59ea498cd5b11c032e7e7d738c5559f" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.888868 4718 scope.go:117] "RemoveContainer" containerID="5c76ce6432a5bb91e4314caffe626fb0319a70d68b99387a1f95fde0e60321b1" Nov 24 08:39:52 crc kubenswrapper[4718]: E1124 08:39:52.892727 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c76ce6432a5bb91e4314caffe626fb0319a70d68b99387a1f95fde0e60321b1\": container with ID starting with 5c76ce6432a5bb91e4314caffe626fb0319a70d68b99387a1f95fde0e60321b1 not found: ID does not exist" containerID="5c76ce6432a5bb91e4314caffe626fb0319a70d68b99387a1f95fde0e60321b1" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.892767 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c76ce6432a5bb91e4314caffe626fb0319a70d68b99387a1f95fde0e60321b1"} err="failed to get container status \"5c76ce6432a5bb91e4314caffe626fb0319a70d68b99387a1f95fde0e60321b1\": rpc error: code = NotFound desc 
= could not find container \"5c76ce6432a5bb91e4314caffe626fb0319a70d68b99387a1f95fde0e60321b1\": container with ID starting with 5c76ce6432a5bb91e4314caffe626fb0319a70d68b99387a1f95fde0e60321b1 not found: ID does not exist" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.892810 4718 scope.go:117] "RemoveContainer" containerID="c95977bb249805c70a7e91414b7d8b00d41ae009536656e2a728acfb33127017" Nov 24 08:39:52 crc kubenswrapper[4718]: E1124 08:39:52.893324 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c95977bb249805c70a7e91414b7d8b00d41ae009536656e2a728acfb33127017\": container with ID starting with c95977bb249805c70a7e91414b7d8b00d41ae009536656e2a728acfb33127017 not found: ID does not exist" containerID="c95977bb249805c70a7e91414b7d8b00d41ae009536656e2a728acfb33127017" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.893381 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c95977bb249805c70a7e91414b7d8b00d41ae009536656e2a728acfb33127017"} err="failed to get container status \"c95977bb249805c70a7e91414b7d8b00d41ae009536656e2a728acfb33127017\": rpc error: code = NotFound desc = could not find container \"c95977bb249805c70a7e91414b7d8b00d41ae009536656e2a728acfb33127017\": container with ID starting with c95977bb249805c70a7e91414b7d8b00d41ae009536656e2a728acfb33127017 not found: ID does not exist" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.893421 4718 scope.go:117] "RemoveContainer" containerID="8947bec93eb0601876a82be203112baad59ea498cd5b11c032e7e7d738c5559f" Nov 24 08:39:52 crc kubenswrapper[4718]: E1124 08:39:52.893873 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8947bec93eb0601876a82be203112baad59ea498cd5b11c032e7e7d738c5559f\": container with ID starting with 8947bec93eb0601876a82be203112baad59ea498cd5b11c032e7e7d738c5559f not found: ID does not exist" containerID="8947bec93eb0601876a82be203112baad59ea498cd5b11c032e7e7d738c5559f" Nov 24 08:39:52 crc kubenswrapper[4718]: I1124 08:39:52.893900 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8947bec93eb0601876a82be203112baad59ea498cd5b11c032e7e7d738c5559f"} err="failed to get container status \"8947bec93eb0601876a82be203112baad59ea498cd5b11c032e7e7d738c5559f\": rpc error: code = NotFound desc = could not find container \"8947bec93eb0601876a82be203112baad59ea498cd5b11c032e7e7d738c5559f\": container with ID starting with 8947bec93eb0601876a82be203112baad59ea498cd5b11c032e7e7d738c5559f not found: ID does not exist" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.674298 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-zqgzd" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.848194 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c5csg"] Nov 24 08:39:53 crc kubenswrapper[4718]: E1124 08:39:53.848589 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39df5500-d2c6-4c61-be65-cc5598f8201d" containerName="extract-utilities" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.848615 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="39df5500-d2c6-4c61-be65-cc5598f8201d" containerName="extract-utilities" Nov 24 08:39:53 crc kubenswrapper[4718]: E1124 08:39:53.848635 4718 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16" containerName="extract-content" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.848645 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16" containerName="extract-content" Nov 24 08:39:53 crc kubenswrapper[4718]: E1124 08:39:53.848674 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f4a042f-effa-4f5a-ac2d-4d378b0f15a5" containerName="extract-utilities" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.848685 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f4a042f-effa-4f5a-ac2d-4d378b0f15a5" containerName="extract-utilities" Nov 24 08:39:53 crc kubenswrapper[4718]: E1124 08:39:53.848698 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d037ebb5-19ab-471e-b627-3b0487dfa12c" containerName="marketplace-operator" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.848707 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="d037ebb5-19ab-471e-b627-3b0487dfa12c" containerName="marketplace-operator" Nov 24 08:39:53 crc kubenswrapper[4718]: E1124 08:39:53.848721 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16" containerName="extract-utilities" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.848730 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16" containerName="extract-utilities" Nov 24 08:39:53 crc kubenswrapper[4718]: E1124 08:39:53.848740 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f4a042f-effa-4f5a-ac2d-4d378b0f15a5" containerName="extract-content" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.848749 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f4a042f-effa-4f5a-ac2d-4d378b0f15a5" containerName="extract-content" Nov 24 08:39:53 crc kubenswrapper[4718]: E1124 08:39:53.848765 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44264beb-2b14-47a9-9da4-18ca5e19d282" containerName="extract-utilities" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.848774 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="44264beb-2b14-47a9-9da4-18ca5e19d282" containerName="extract-utilities" Nov 24 08:39:53 crc kubenswrapper[4718]: E1124 08:39:53.848786 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44264beb-2b14-47a9-9da4-18ca5e19d282" containerName="extract-content" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.848795 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="44264beb-2b14-47a9-9da4-18ca5e19d282" containerName="extract-content" Nov 24 08:39:53 crc kubenswrapper[4718]: E1124 08:39:53.848807 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39df5500-d2c6-4c61-be65-cc5598f8201d" containerName="registry-server" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.848818 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="39df5500-d2c6-4c61-be65-cc5598f8201d" containerName="registry-server" Nov 24 08:39:53 crc kubenswrapper[4718]: E1124 08:39:53.848827 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39df5500-d2c6-4c61-be65-cc5598f8201d" containerName="extract-content" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.848841 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="39df5500-d2c6-4c61-be65-cc5598f8201d" containerName="extract-content" Nov 24 08:39:53 crc 
kubenswrapper[4718]: E1124 08:39:53.848851 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f4a042f-effa-4f5a-ac2d-4d378b0f15a5" containerName="registry-server" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.848862 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f4a042f-effa-4f5a-ac2d-4d378b0f15a5" containerName="registry-server" Nov 24 08:39:53 crc kubenswrapper[4718]: E1124 08:39:53.848874 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16" containerName="registry-server" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.848888 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16" containerName="registry-server" Nov 24 08:39:53 crc kubenswrapper[4718]: E1124 08:39:53.848902 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44264beb-2b14-47a9-9da4-18ca5e19d282" containerName="registry-server" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.848912 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="44264beb-2b14-47a9-9da4-18ca5e19d282" containerName="registry-server" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.849075 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="44264beb-2b14-47a9-9da4-18ca5e19d282" containerName="registry-server" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.850794 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16" containerName="registry-server" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.850816 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f4a042f-effa-4f5a-ac2d-4d378b0f15a5" containerName="registry-server" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.850832 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="d037ebb5-19ab-471e-b627-3b0487dfa12c" containerName="marketplace-operator" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.850841 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="39df5500-d2c6-4c61-be65-cc5598f8201d" containerName="registry-server" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.854144 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c5csg" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.859375 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c5csg"] Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.861447 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.972947 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b22c865e-0000-410a-a062-d994e40b806b-catalog-content\") pod \"certified-operators-c5csg\" (UID: \"b22c865e-0000-410a-a062-d994e40b806b\") " pod="openshift-marketplace/certified-operators-c5csg" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.974098 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b22c865e-0000-410a-a062-d994e40b806b-utilities\") pod \"certified-operators-c5csg\" (UID: \"b22c865e-0000-410a-a062-d994e40b806b\") " pod="openshift-marketplace/certified-operators-c5csg" Nov 24 08:39:53 crc kubenswrapper[4718]: I1124 08:39:53.974366 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bp7gm\" (UniqueName: \"kubernetes.io/projected/b22c865e-0000-410a-a062-d994e40b806b-kube-api-access-bp7gm\") pod \"certified-operators-c5csg\" (UID: \"b22c865e-0000-410a-a062-d994e40b806b\") " pod="openshift-marketplace/certified-operators-c5csg" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.048136 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bnkbp"] Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.049417 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bnkbp" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.053082 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.058529 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bnkbp"] Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.076304 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bp7gm\" (UniqueName: \"kubernetes.io/projected/b22c865e-0000-410a-a062-d994e40b806b-kube-api-access-bp7gm\") pod \"certified-operators-c5csg\" (UID: \"b22c865e-0000-410a-a062-d994e40b806b\") " pod="openshift-marketplace/certified-operators-c5csg" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.076698 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b22c865e-0000-410a-a062-d994e40b806b-catalog-content\") pod \"certified-operators-c5csg\" (UID: \"b22c865e-0000-410a-a062-d994e40b806b\") " pod="openshift-marketplace/certified-operators-c5csg" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.076903 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b22c865e-0000-410a-a062-d994e40b806b-utilities\") pod \"certified-operators-c5csg\" (UID: \"b22c865e-0000-410a-a062-d994e40b806b\") " pod="openshift-marketplace/certified-operators-c5csg" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.077230 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b22c865e-0000-410a-a062-d994e40b806b-catalog-content\") pod \"certified-operators-c5csg\" (UID: \"b22c865e-0000-410a-a062-d994e40b806b\") " pod="openshift-marketplace/certified-operators-c5csg" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.077910 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b22c865e-0000-410a-a062-d994e40b806b-utilities\") pod \"certified-operators-c5csg\" (UID: \"b22c865e-0000-410a-a062-d994e40b806b\") " pod="openshift-marketplace/certified-operators-c5csg" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.101063 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bp7gm\" (UniqueName: \"kubernetes.io/projected/b22c865e-0000-410a-a062-d994e40b806b-kube-api-access-bp7gm\") pod \"certified-operators-c5csg\" (UID: \"b22c865e-0000-410a-a062-d994e40b806b\") " pod="openshift-marketplace/certified-operators-c5csg" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.178401 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqc8j\" (UniqueName: \"kubernetes.io/projected/2f630e0b-0dbb-4498-a3dc-9cd80a7bf225-kube-api-access-kqc8j\") pod \"community-operators-bnkbp\" (UID: \"2f630e0b-0dbb-4498-a3dc-9cd80a7bf225\") " pod="openshift-marketplace/community-operators-bnkbp" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.178506 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f630e0b-0dbb-4498-a3dc-9cd80a7bf225-catalog-content\") pod \"community-operators-bnkbp\" (UID: 
\"2f630e0b-0dbb-4498-a3dc-9cd80a7bf225\") " pod="openshift-marketplace/community-operators-bnkbp" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.178591 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f630e0b-0dbb-4498-a3dc-9cd80a7bf225-utilities\") pod \"community-operators-bnkbp\" (UID: \"2f630e0b-0dbb-4498-a3dc-9cd80a7bf225\") " pod="openshift-marketplace/community-operators-bnkbp" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.181821 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c5csg" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.280254 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqc8j\" (UniqueName: \"kubernetes.io/projected/2f630e0b-0dbb-4498-a3dc-9cd80a7bf225-kube-api-access-kqc8j\") pod \"community-operators-bnkbp\" (UID: \"2f630e0b-0dbb-4498-a3dc-9cd80a7bf225\") " pod="openshift-marketplace/community-operators-bnkbp" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.280505 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f630e0b-0dbb-4498-a3dc-9cd80a7bf225-catalog-content\") pod \"community-operators-bnkbp\" (UID: \"2f630e0b-0dbb-4498-a3dc-9cd80a7bf225\") " pod="openshift-marketplace/community-operators-bnkbp" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.280570 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f630e0b-0dbb-4498-a3dc-9cd80a7bf225-utilities\") pod \"community-operators-bnkbp\" (UID: \"2f630e0b-0dbb-4498-a3dc-9cd80a7bf225\") " pod="openshift-marketplace/community-operators-bnkbp" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.281275 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f630e0b-0dbb-4498-a3dc-9cd80a7bf225-catalog-content\") pod \"community-operators-bnkbp\" (UID: \"2f630e0b-0dbb-4498-a3dc-9cd80a7bf225\") " pod="openshift-marketplace/community-operators-bnkbp" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.281460 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f630e0b-0dbb-4498-a3dc-9cd80a7bf225-utilities\") pod \"community-operators-bnkbp\" (UID: \"2f630e0b-0dbb-4498-a3dc-9cd80a7bf225\") " pod="openshift-marketplace/community-operators-bnkbp" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.299191 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqc8j\" (UniqueName: \"kubernetes.io/projected/2f630e0b-0dbb-4498-a3dc-9cd80a7bf225-kube-api-access-kqc8j\") pod \"community-operators-bnkbp\" (UID: \"2f630e0b-0dbb-4498-a3dc-9cd80a7bf225\") " pod="openshift-marketplace/community-operators-bnkbp" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.359257 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c5csg"] Nov 24 08:39:54 crc kubenswrapper[4718]: W1124 08:39:54.370776 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb22c865e_0000_410a_a062_d994e40b806b.slice/crio-837f9295fe868c071e0ad70cd47ae8d9507878ec2a231ad0eb840ab581044464 WatchSource:0}: 
Error finding container 837f9295fe868c071e0ad70cd47ae8d9507878ec2a231ad0eb840ab581044464: Status 404 returned error can't find the container with id 837f9295fe868c071e0ad70cd47ae8d9507878ec2a231ad0eb840ab581044464 Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.374670 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bnkbp" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.606077 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16" path="/var/lib/kubelet/pods/0cbd20a0-57cf-428b-b9c6-2ffb52ec5e16/volumes" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.607074 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39df5500-d2c6-4c61-be65-cc5598f8201d" path="/var/lib/kubelet/pods/39df5500-d2c6-4c61-be65-cc5598f8201d/volumes" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.607648 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f4a042f-effa-4f5a-ac2d-4d378b0f15a5" path="/var/lib/kubelet/pods/3f4a042f-effa-4f5a-ac2d-4d378b0f15a5/volumes" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.608663 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44264beb-2b14-47a9-9da4-18ca5e19d282" path="/var/lib/kubelet/pods/44264beb-2b14-47a9-9da4-18ca5e19d282/volumes" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.609283 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d037ebb5-19ab-471e-b627-3b0487dfa12c" path="/var/lib/kubelet/pods/d037ebb5-19ab-471e-b627-3b0487dfa12c/volumes" Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.654376 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bnkbp"] Nov 24 08:39:54 crc kubenswrapper[4718]: W1124 08:39:54.671436 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f630e0b_0dbb_4498_a3dc_9cd80a7bf225.slice/crio-2a0d967497db434b9522419f51f28f6de6b7073d39abbc3ccc968dc36a7f88ba WatchSource:0}: Error finding container 2a0d967497db434b9522419f51f28f6de6b7073d39abbc3ccc968dc36a7f88ba: Status 404 returned error can't find the container with id 2a0d967497db434b9522419f51f28f6de6b7073d39abbc3ccc968dc36a7f88ba Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.677689 4718 generic.go:334] "Generic (PLEG): container finished" podID="b22c865e-0000-410a-a062-d994e40b806b" containerID="db065ee12ac4625453fa7fe67045503176c593d603d6e790fe52adae8de89808" exitCode=0 Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.677772 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5csg" event={"ID":"b22c865e-0000-410a-a062-d994e40b806b","Type":"ContainerDied","Data":"db065ee12ac4625453fa7fe67045503176c593d603d6e790fe52adae8de89808"} Nov 24 08:39:54 crc kubenswrapper[4718]: I1124 08:39:54.677812 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5csg" event={"ID":"b22c865e-0000-410a-a062-d994e40b806b","Type":"ContainerStarted","Data":"837f9295fe868c071e0ad70cd47ae8d9507878ec2a231ad0eb840ab581044464"} Nov 24 08:39:55 crc kubenswrapper[4718]: I1124 08:39:55.684112 4718 generic.go:334] "Generic (PLEG): container finished" podID="b22c865e-0000-410a-a062-d994e40b806b" containerID="92e16bf9416f7577e7fe52920a026bd731089ebfa001db384b8f789e6effabd0" exitCode=0 Nov 24 08:39:55 crc 
kubenswrapper[4718]: I1124 08:39:55.684171 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5csg" event={"ID":"b22c865e-0000-410a-a062-d994e40b806b","Type":"ContainerDied","Data":"92e16bf9416f7577e7fe52920a026bd731089ebfa001db384b8f789e6effabd0"} Nov 24 08:39:55 crc kubenswrapper[4718]: I1124 08:39:55.687430 4718 generic.go:334] "Generic (PLEG): container finished" podID="2f630e0b-0dbb-4498-a3dc-9cd80a7bf225" containerID="c7756cdc4ed9049cc43adff0e8e5e584c4b3c0a585954c3500e02f30e1fb4142" exitCode=0 Nov 24 08:39:55 crc kubenswrapper[4718]: I1124 08:39:55.687634 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bnkbp" event={"ID":"2f630e0b-0dbb-4498-a3dc-9cd80a7bf225","Type":"ContainerDied","Data":"c7756cdc4ed9049cc43adff0e8e5e584c4b3c0a585954c3500e02f30e1fb4142"} Nov 24 08:39:55 crc kubenswrapper[4718]: I1124 08:39:55.687725 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bnkbp" event={"ID":"2f630e0b-0dbb-4498-a3dc-9cd80a7bf225","Type":"ContainerStarted","Data":"2a0d967497db434b9522419f51f28f6de6b7073d39abbc3ccc968dc36a7f88ba"} Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.242439 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gkskr"] Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.243962 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gkskr" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.247779 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.255026 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gkskr"] Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.304488 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcef040a-e828-443f-80c3-4a3956da53c2-utilities\") pod \"redhat-marketplace-gkskr\" (UID: \"bcef040a-e828-443f-80c3-4a3956da53c2\") " pod="openshift-marketplace/redhat-marketplace-gkskr" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.304525 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btrmd\" (UniqueName: \"kubernetes.io/projected/bcef040a-e828-443f-80c3-4a3956da53c2-kube-api-access-btrmd\") pod \"redhat-marketplace-gkskr\" (UID: \"bcef040a-e828-443f-80c3-4a3956da53c2\") " pod="openshift-marketplace/redhat-marketplace-gkskr" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.304550 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcef040a-e828-443f-80c3-4a3956da53c2-catalog-content\") pod \"redhat-marketplace-gkskr\" (UID: \"bcef040a-e828-443f-80c3-4a3956da53c2\") " pod="openshift-marketplace/redhat-marketplace-gkskr" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.405994 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcef040a-e828-443f-80c3-4a3956da53c2-utilities\") pod \"redhat-marketplace-gkskr\" (UID: \"bcef040a-e828-443f-80c3-4a3956da53c2\") " pod="openshift-marketplace/redhat-marketplace-gkskr" Nov 
24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.406042 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btrmd\" (UniqueName: \"kubernetes.io/projected/bcef040a-e828-443f-80c3-4a3956da53c2-kube-api-access-btrmd\") pod \"redhat-marketplace-gkskr\" (UID: \"bcef040a-e828-443f-80c3-4a3956da53c2\") " pod="openshift-marketplace/redhat-marketplace-gkskr" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.406069 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcef040a-e828-443f-80c3-4a3956da53c2-catalog-content\") pod \"redhat-marketplace-gkskr\" (UID: \"bcef040a-e828-443f-80c3-4a3956da53c2\") " pod="openshift-marketplace/redhat-marketplace-gkskr" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.406665 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcef040a-e828-443f-80c3-4a3956da53c2-catalog-content\") pod \"redhat-marketplace-gkskr\" (UID: \"bcef040a-e828-443f-80c3-4a3956da53c2\") " pod="openshift-marketplace/redhat-marketplace-gkskr" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.406717 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcef040a-e828-443f-80c3-4a3956da53c2-utilities\") pod \"redhat-marketplace-gkskr\" (UID: \"bcef040a-e828-443f-80c3-4a3956da53c2\") " pod="openshift-marketplace/redhat-marketplace-gkskr" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.433136 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btrmd\" (UniqueName: \"kubernetes.io/projected/bcef040a-e828-443f-80c3-4a3956da53c2-kube-api-access-btrmd\") pod \"redhat-marketplace-gkskr\" (UID: \"bcef040a-e828-443f-80c3-4a3956da53c2\") " pod="openshift-marketplace/redhat-marketplace-gkskr" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.441879 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-r7tnb"] Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.443000 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r7tnb" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.445668 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.450465 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r7tnb"] Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.507026 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa32947a-3fd8-4557-beb9-58c89792425a-utilities\") pod \"redhat-operators-r7tnb\" (UID: \"fa32947a-3fd8-4557-beb9-58c89792425a\") " pod="openshift-marketplace/redhat-operators-r7tnb" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.507337 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa32947a-3fd8-4557-beb9-58c89792425a-catalog-content\") pod \"redhat-operators-r7tnb\" (UID: \"fa32947a-3fd8-4557-beb9-58c89792425a\") " pod="openshift-marketplace/redhat-operators-r7tnb" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.507372 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhdw5\" (UniqueName: \"kubernetes.io/projected/fa32947a-3fd8-4557-beb9-58c89792425a-kube-api-access-mhdw5\") pod \"redhat-operators-r7tnb\" (UID: \"fa32947a-3fd8-4557-beb9-58c89792425a\") " pod="openshift-marketplace/redhat-operators-r7tnb" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.563178 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gkskr" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.609103 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa32947a-3fd8-4557-beb9-58c89792425a-utilities\") pod \"redhat-operators-r7tnb\" (UID: \"fa32947a-3fd8-4557-beb9-58c89792425a\") " pod="openshift-marketplace/redhat-operators-r7tnb" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.609184 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa32947a-3fd8-4557-beb9-58c89792425a-catalog-content\") pod \"redhat-operators-r7tnb\" (UID: \"fa32947a-3fd8-4557-beb9-58c89792425a\") " pod="openshift-marketplace/redhat-operators-r7tnb" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.609219 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhdw5\" (UniqueName: \"kubernetes.io/projected/fa32947a-3fd8-4557-beb9-58c89792425a-kube-api-access-mhdw5\") pod \"redhat-operators-r7tnb\" (UID: \"fa32947a-3fd8-4557-beb9-58c89792425a\") " pod="openshift-marketplace/redhat-operators-r7tnb" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.609564 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa32947a-3fd8-4557-beb9-58c89792425a-utilities\") pod \"redhat-operators-r7tnb\" (UID: \"fa32947a-3fd8-4557-beb9-58c89792425a\") " pod="openshift-marketplace/redhat-operators-r7tnb" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.609820 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa32947a-3fd8-4557-beb9-58c89792425a-catalog-content\") pod \"redhat-operators-r7tnb\" (UID: \"fa32947a-3fd8-4557-beb9-58c89792425a\") " pod="openshift-marketplace/redhat-operators-r7tnb" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.624903 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhdw5\" (UniqueName: \"kubernetes.io/projected/fa32947a-3fd8-4557-beb9-58c89792425a-kube-api-access-mhdw5\") pod \"redhat-operators-r7tnb\" (UID: \"fa32947a-3fd8-4557-beb9-58c89792425a\") " pod="openshift-marketplace/redhat-operators-r7tnb" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.696067 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5csg" event={"ID":"b22c865e-0000-410a-a062-d994e40b806b","Type":"ContainerStarted","Data":"40d0f9a78fcc2a80cbc7ec30c57af6461b7315b471271dbf72af7fb5facf986b"} Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.761237 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r7tnb" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.972195 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c5csg" podStartSLOduration=2.584894673 podStartE2EDuration="3.972174616s" podCreationTimestamp="2025-11-24 08:39:53 +0000 UTC" firstStartedPulling="2025-11-24 08:39:54.679007737 +0000 UTC m=+266.795298641" lastFinishedPulling="2025-11-24 08:39:56.06628767 +0000 UTC m=+268.182578584" observedRunningTime="2025-11-24 08:39:56.718117902 +0000 UTC m=+268.834408806" watchObservedRunningTime="2025-11-24 08:39:56.972174616 +0000 UTC m=+269.088465520" Nov 24 08:39:56 crc kubenswrapper[4718]: I1124 08:39:56.974270 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gkskr"] Nov 24 08:39:57 crc kubenswrapper[4718]: W1124 08:39:57.002827 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbcef040a_e828_443f_80c3_4a3956da53c2.slice/crio-3ab76f45b140f10945f0f481ec677e22e920107d40cadd26326258c205f2533f WatchSource:0}: Error finding container 3ab76f45b140f10945f0f481ec677e22e920107d40cadd26326258c205f2533f: Status 404 returned error can't find the container with id 3ab76f45b140f10945f0f481ec677e22e920107d40cadd26326258c205f2533f Nov 24 08:39:57 crc kubenswrapper[4718]: I1124 08:39:57.205289 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r7tnb"] Nov 24 08:39:57 crc kubenswrapper[4718]: W1124 08:39:57.213807 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa32947a_3fd8_4557_beb9_58c89792425a.slice/crio-ddb893c93ef6cc1ce87a1ec856cf4cb836082ca605b4584d2a7cf3366a56ac24 WatchSource:0}: Error finding container ddb893c93ef6cc1ce87a1ec856cf4cb836082ca605b4584d2a7cf3366a56ac24: Status 404 returned error can't find the container with id ddb893c93ef6cc1ce87a1ec856cf4cb836082ca605b4584d2a7cf3366a56ac24 Nov 24 08:39:57 crc kubenswrapper[4718]: I1124 08:39:57.702665 4718 generic.go:334] "Generic (PLEG): container finished" podID="2f630e0b-0dbb-4498-a3dc-9cd80a7bf225" containerID="47b9cdbf8c805c4cbfda8d954b9cb928529ae8496eeb1c90f55c3c1c8ac4c69e" exitCode=0 Nov 24 08:39:57 crc kubenswrapper[4718]: I1124 08:39:57.702786 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bnkbp" event={"ID":"2f630e0b-0dbb-4498-a3dc-9cd80a7bf225","Type":"ContainerDied","Data":"47b9cdbf8c805c4cbfda8d954b9cb928529ae8496eeb1c90f55c3c1c8ac4c69e"} Nov 24 08:39:57 crc kubenswrapper[4718]: I1124 08:39:57.704481 4718 generic.go:334] "Generic (PLEG): container finished" podID="bcef040a-e828-443f-80c3-4a3956da53c2" containerID="4f5181de8afb2c8f815eb679535db4a43b9bac456bb20c5d25bbd5b4e5e58b61" exitCode=0 Nov 24 08:39:57 crc kubenswrapper[4718]: I1124 08:39:57.704515 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gkskr" event={"ID":"bcef040a-e828-443f-80c3-4a3956da53c2","Type":"ContainerDied","Data":"4f5181de8afb2c8f815eb679535db4a43b9bac456bb20c5d25bbd5b4e5e58b61"} Nov 24 08:39:57 crc kubenswrapper[4718]: I1124 08:39:57.704555 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gkskr" 
event={"ID":"bcef040a-e828-443f-80c3-4a3956da53c2","Type":"ContainerStarted","Data":"3ab76f45b140f10945f0f481ec677e22e920107d40cadd26326258c205f2533f"} Nov 24 08:39:57 crc kubenswrapper[4718]: I1124 08:39:57.706740 4718 generic.go:334] "Generic (PLEG): container finished" podID="fa32947a-3fd8-4557-beb9-58c89792425a" containerID="51ead4c124d878868ee9afb449667c824c47f6572e435ed3f16124e839197aae" exitCode=0 Nov 24 08:39:57 crc kubenswrapper[4718]: I1124 08:39:57.706784 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r7tnb" event={"ID":"fa32947a-3fd8-4557-beb9-58c89792425a","Type":"ContainerDied","Data":"51ead4c124d878868ee9afb449667c824c47f6572e435ed3f16124e839197aae"} Nov 24 08:39:57 crc kubenswrapper[4718]: I1124 08:39:57.706821 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r7tnb" event={"ID":"fa32947a-3fd8-4557-beb9-58c89792425a","Type":"ContainerStarted","Data":"ddb893c93ef6cc1ce87a1ec856cf4cb836082ca605b4584d2a7cf3366a56ac24"} Nov 24 08:39:58 crc kubenswrapper[4718]: I1124 08:39:58.712846 4718 generic.go:334] "Generic (PLEG): container finished" podID="bcef040a-e828-443f-80c3-4a3956da53c2" containerID="9f40ae3aa0fcecb8ac0158a64d7526756412b6f4b347c2be69b1a07810f9381d" exitCode=0 Nov 24 08:39:58 crc kubenswrapper[4718]: I1124 08:39:58.713196 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gkskr" event={"ID":"bcef040a-e828-443f-80c3-4a3956da53c2","Type":"ContainerDied","Data":"9f40ae3aa0fcecb8ac0158a64d7526756412b6f4b347c2be69b1a07810f9381d"} Nov 24 08:39:58 crc kubenswrapper[4718]: I1124 08:39:58.717281 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r7tnb" event={"ID":"fa32947a-3fd8-4557-beb9-58c89792425a","Type":"ContainerStarted","Data":"be89de0fec6e7af1f31d22688e21b2551d3478fe8dd0668ed45c5b44f013290d"} Nov 24 08:39:58 crc kubenswrapper[4718]: I1124 08:39:58.719484 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bnkbp" event={"ID":"2f630e0b-0dbb-4498-a3dc-9cd80a7bf225","Type":"ContainerStarted","Data":"2f0d833a39260c0aec0cd3df48c033c17587a1bb702b59b58a3cc0ae567fbcb4"} Nov 24 08:39:58 crc kubenswrapper[4718]: I1124 08:39:58.756512 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bnkbp" podStartSLOduration=2.223378841 podStartE2EDuration="4.75649596s" podCreationTimestamp="2025-11-24 08:39:54 +0000 UTC" firstStartedPulling="2025-11-24 08:39:55.689895304 +0000 UTC m=+267.806186208" lastFinishedPulling="2025-11-24 08:39:58.223012423 +0000 UTC m=+270.339303327" observedRunningTime="2025-11-24 08:39:58.755135797 +0000 UTC m=+270.871426701" watchObservedRunningTime="2025-11-24 08:39:58.75649596 +0000 UTC m=+270.872786864" Nov 24 08:39:59 crc kubenswrapper[4718]: I1124 08:39:59.725928 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gkskr" event={"ID":"bcef040a-e828-443f-80c3-4a3956da53c2","Type":"ContainerStarted","Data":"aa705cfe88f7c962e510272e399f532270fa37f02ea147852c7644e03659e082"} Nov 24 08:39:59 crc kubenswrapper[4718]: I1124 08:39:59.728666 4718 generic.go:334] "Generic (PLEG): container finished" podID="fa32947a-3fd8-4557-beb9-58c89792425a" containerID="be89de0fec6e7af1f31d22688e21b2551d3478fe8dd0668ed45c5b44f013290d" exitCode=0 Nov 24 08:39:59 crc kubenswrapper[4718]: I1124 08:39:59.728730 4718 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r7tnb" event={"ID":"fa32947a-3fd8-4557-beb9-58c89792425a","Type":"ContainerDied","Data":"be89de0fec6e7af1f31d22688e21b2551d3478fe8dd0668ed45c5b44f013290d"} Nov 24 08:39:59 crc kubenswrapper[4718]: I1124 08:39:59.745247 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gkskr" podStartSLOduration=2.243394378 podStartE2EDuration="3.745231635s" podCreationTimestamp="2025-11-24 08:39:56 +0000 UTC" firstStartedPulling="2025-11-24 08:39:57.706030538 +0000 UTC m=+269.822321452" lastFinishedPulling="2025-11-24 08:39:59.207867795 +0000 UTC m=+271.324158709" observedRunningTime="2025-11-24 08:39:59.743164865 +0000 UTC m=+271.859455779" watchObservedRunningTime="2025-11-24 08:39:59.745231635 +0000 UTC m=+271.861522539" Nov 24 08:40:01 crc kubenswrapper[4718]: I1124 08:40:01.740365 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r7tnb" event={"ID":"fa32947a-3fd8-4557-beb9-58c89792425a","Type":"ContainerStarted","Data":"2e49ac1c18e3b4899b3880ebc689372f3ba59627f425ff8c1fa32bb349fd5bf1"} Nov 24 08:40:01 crc kubenswrapper[4718]: I1124 08:40:01.758865 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-r7tnb" podStartSLOduration=3.094752486 podStartE2EDuration="5.758848389s" podCreationTimestamp="2025-11-24 08:39:56 +0000 UTC" firstStartedPulling="2025-11-24 08:39:57.711722667 +0000 UTC m=+269.828013581" lastFinishedPulling="2025-11-24 08:40:00.37581858 +0000 UTC m=+272.492109484" observedRunningTime="2025-11-24 08:40:01.758704686 +0000 UTC m=+273.874995590" watchObservedRunningTime="2025-11-24 08:40:01.758848389 +0000 UTC m=+273.875139293" Nov 24 08:40:04 crc kubenswrapper[4718]: I1124 08:40:04.182881 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c5csg" Nov 24 08:40:04 crc kubenswrapper[4718]: I1124 08:40:04.183212 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c5csg" Nov 24 08:40:04 crc kubenswrapper[4718]: I1124 08:40:04.219863 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c5csg" Nov 24 08:40:04 crc kubenswrapper[4718]: I1124 08:40:04.375119 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bnkbp" Nov 24 08:40:04 crc kubenswrapper[4718]: I1124 08:40:04.376020 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bnkbp" Nov 24 08:40:04 crc kubenswrapper[4718]: I1124 08:40:04.420444 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bnkbp" Nov 24 08:40:04 crc kubenswrapper[4718]: I1124 08:40:04.794809 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c5csg" Nov 24 08:40:04 crc kubenswrapper[4718]: I1124 08:40:04.797005 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bnkbp" Nov 24 08:40:06 crc kubenswrapper[4718]: I1124 08:40:06.563999 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gkskr" Nov 24 08:40:06 crc 
kubenswrapper[4718]: I1124 08:40:06.564403 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gkskr" Nov 24 08:40:06 crc kubenswrapper[4718]: I1124 08:40:06.603902 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gkskr" Nov 24 08:40:06 crc kubenswrapper[4718]: I1124 08:40:06.761833 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-r7tnb" Nov 24 08:40:06 crc kubenswrapper[4718]: I1124 08:40:06.763110 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-r7tnb" Nov 24 08:40:06 crc kubenswrapper[4718]: I1124 08:40:06.798934 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-r7tnb" Nov 24 08:40:06 crc kubenswrapper[4718]: I1124 08:40:06.805116 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gkskr" Nov 24 08:40:07 crc kubenswrapper[4718]: I1124 08:40:07.806337 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-r7tnb" Nov 24 08:41:22 crc kubenswrapper[4718]: I1124 08:41:22.045085 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:41:22 crc kubenswrapper[4718]: I1124 08:41:22.045615 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:41:52 crc kubenswrapper[4718]: I1124 08:41:52.045439 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:41:52 crc kubenswrapper[4718]: I1124 08:41:52.046595 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.203276 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fcr7n"] Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.206049 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.214055 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fcr7n"] Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.333902 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ec77e65c-53c8-4b33-93fd-85f93421628e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.333971 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.334039 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec77e65c-53c8-4b33-93fd-85f93421628e-trusted-ca\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.334063 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ec77e65c-53c8-4b33-93fd-85f93421628e-bound-sa-token\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.334093 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ec77e65c-53c8-4b33-93fd-85f93421628e-registry-certificates\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.334117 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ec77e65c-53c8-4b33-93fd-85f93421628e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.334143 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8h5t\" (UniqueName: \"kubernetes.io/projected/ec77e65c-53c8-4b33-93fd-85f93421628e-kube-api-access-g8h5t\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.334182 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/ec77e65c-53c8-4b33-93fd-85f93421628e-registry-tls\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.350989 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.435694 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ec77e65c-53c8-4b33-93fd-85f93421628e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.435769 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec77e65c-53c8-4b33-93fd-85f93421628e-trusted-ca\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.435787 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ec77e65c-53c8-4b33-93fd-85f93421628e-bound-sa-token\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.435813 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ec77e65c-53c8-4b33-93fd-85f93421628e-registry-certificates\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.435836 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ec77e65c-53c8-4b33-93fd-85f93421628e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.435866 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ec77e65c-53c8-4b33-93fd-85f93421628e-registry-tls\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.435882 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8h5t\" (UniqueName: \"kubernetes.io/projected/ec77e65c-53c8-4b33-93fd-85f93421628e-kube-api-access-g8h5t\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.436449 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ec77e65c-53c8-4b33-93fd-85f93421628e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.436989 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ec77e65c-53c8-4b33-93fd-85f93421628e-registry-certificates\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.437079 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec77e65c-53c8-4b33-93fd-85f93421628e-trusted-ca\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.441853 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ec77e65c-53c8-4b33-93fd-85f93421628e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.441923 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ec77e65c-53c8-4b33-93fd-85f93421628e-registry-tls\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.450006 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ec77e65c-53c8-4b33-93fd-85f93421628e-bound-sa-token\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.450629 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8h5t\" (UniqueName: \"kubernetes.io/projected/ec77e65c-53c8-4b33-93fd-85f93421628e-kube-api-access-g8h5t\") pod \"image-registry-66df7c8f76-fcr7n\" (UID: \"ec77e65c-53c8-4b33-93fd-85f93421628e\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.531108 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:16 crc kubenswrapper[4718]: I1124 08:42:16.712384 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fcr7n"] Nov 24 08:42:16 crc kubenswrapper[4718]: W1124 08:42:16.719825 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podec77e65c_53c8_4b33_93fd_85f93421628e.slice/crio-846a2cfa8005e03e19ddcdea45412753498ea8661a5d992c9eb81f6b9d5d1004 WatchSource:0}: Error finding container 846a2cfa8005e03e19ddcdea45412753498ea8661a5d992c9eb81f6b9d5d1004: Status 404 returned error can't find the container with id 846a2cfa8005e03e19ddcdea45412753498ea8661a5d992c9eb81f6b9d5d1004 Nov 24 08:42:17 crc kubenswrapper[4718]: I1124 08:42:17.367862 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" event={"ID":"ec77e65c-53c8-4b33-93fd-85f93421628e","Type":"ContainerStarted","Data":"99a284ec56cc5755fad9875a40070a00bdab8bff25c1e8e1d969370c026eb76f"} Nov 24 08:42:17 crc kubenswrapper[4718]: I1124 08:42:17.367910 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" event={"ID":"ec77e65c-53c8-4b33-93fd-85f93421628e","Type":"ContainerStarted","Data":"846a2cfa8005e03e19ddcdea45412753498ea8661a5d992c9eb81f6b9d5d1004"} Nov 24 08:42:17 crc kubenswrapper[4718]: I1124 08:42:17.369234 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:17 crc kubenswrapper[4718]: I1124 08:42:17.386828 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" podStartSLOduration=1.386813638 podStartE2EDuration="1.386813638s" podCreationTimestamp="2025-11-24 08:42:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:42:17.38448431 +0000 UTC m=+409.500775224" watchObservedRunningTime="2025-11-24 08:42:17.386813638 +0000 UTC m=+409.503104542" Nov 24 08:42:22 crc kubenswrapper[4718]: I1124 08:42:22.045251 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:42:22 crc kubenswrapper[4718]: I1124 08:42:22.045633 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:42:22 crc kubenswrapper[4718]: I1124 08:42:22.045676 4718 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:42:22 crc kubenswrapper[4718]: I1124 08:42:22.046338 4718 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3a7ca29b97a51852d552c1a2d7e2bdb50bf9e50e07b800355266295362166198"} pod="openshift-machine-config-operator/machine-config-daemon-575gl" 
containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 08:42:22 crc kubenswrapper[4718]: I1124 08:42:22.046399 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" containerID="cri-o://3a7ca29b97a51852d552c1a2d7e2bdb50bf9e50e07b800355266295362166198" gracePeriod=600 Nov 24 08:42:22 crc kubenswrapper[4718]: I1124 08:42:22.408512 4718 generic.go:334] "Generic (PLEG): container finished" podID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerID="3a7ca29b97a51852d552c1a2d7e2bdb50bf9e50e07b800355266295362166198" exitCode=0 Nov 24 08:42:22 crc kubenswrapper[4718]: I1124 08:42:22.408606 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerDied","Data":"3a7ca29b97a51852d552c1a2d7e2bdb50bf9e50e07b800355266295362166198"} Nov 24 08:42:22 crc kubenswrapper[4718]: I1124 08:42:22.408917 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerStarted","Data":"873c6f9762288e9dfb0f0664bf7a56f9f72a8fa6abf831277ce0db85d93a114f"} Nov 24 08:42:22 crc kubenswrapper[4718]: I1124 08:42:22.408942 4718 scope.go:117] "RemoveContainer" containerID="c837f919f14f1cd0511799645f58364683654da157a2e5789695bf2593b32f67" Nov 24 08:42:36 crc kubenswrapper[4718]: I1124 08:42:36.536165 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-fcr7n" Nov 24 08:42:36 crc kubenswrapper[4718]: I1124 08:42:36.576912 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-r2v9t"] Nov 24 08:43:01 crc kubenswrapper[4718]: I1124 08:43:01.622791 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" podUID="9fbd4dfd-1303-47a7-a2c9-3a093f9103cb" containerName="registry" containerID="cri-o://161b52c8266a4dcfef56e10381ca93a99b88f2762d394670a7b8151112df21dd" gracePeriod=30 Nov 24 08:43:01 crc kubenswrapper[4718]: I1124 08:43:01.998655 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.126794 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-bound-sa-token\") pod \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.126828 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6j8h\" (UniqueName: \"kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-kube-api-access-v6j8h\") pod \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.127008 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.127051 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-ca-trust-extracted\") pod \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.127077 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-registry-certificates\") pod \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.127103 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-trusted-ca\") pod \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.127168 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-installation-pull-secrets\") pod \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.127216 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-registry-tls\") pod \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\" (UID: \"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb\") " Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.128541 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.129159 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.134442 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.135251 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.135489 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-kube-api-access-v6j8h" (OuterVolumeSpecName: "kube-api-access-v6j8h") pod "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb"). InnerVolumeSpecName "kube-api-access-v6j8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.135818 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.145143 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.147911 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb" (UID: "9fbd4dfd-1303-47a7-a2c9-3a093f9103cb"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.229768 4718 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.229854 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6j8h\" (UniqueName: \"kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-kube-api-access-v6j8h\") on node \"crc\" DevicePath \"\"" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.229902 4718 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.229916 4718 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.229933 4718 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.230017 4718 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.230188 4718 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.616791 4718 generic.go:334] "Generic (PLEG): container finished" podID="9fbd4dfd-1303-47a7-a2c9-3a093f9103cb" containerID="161b52c8266a4dcfef56e10381ca93a99b88f2762d394670a7b8151112df21dd" exitCode=0 Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.616833 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" event={"ID":"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb","Type":"ContainerDied","Data":"161b52c8266a4dcfef56e10381ca93a99b88f2762d394670a7b8151112df21dd"} Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.616862 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.616884 4718 scope.go:117] "RemoveContainer" containerID="161b52c8266a4dcfef56e10381ca93a99b88f2762d394670a7b8151112df21dd" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.616872 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-r2v9t" event={"ID":"9fbd4dfd-1303-47a7-a2c9-3a093f9103cb","Type":"ContainerDied","Data":"56766967d3a6adcccd33675227c51c5eb8ba73365c2060e81187296a97573e34"} Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.635284 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-r2v9t"] Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.637587 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-r2v9t"] Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.639167 4718 scope.go:117] "RemoveContainer" containerID="161b52c8266a4dcfef56e10381ca93a99b88f2762d394670a7b8151112df21dd" Nov 24 08:43:02 crc kubenswrapper[4718]: E1124 08:43:02.640310 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"161b52c8266a4dcfef56e10381ca93a99b88f2762d394670a7b8151112df21dd\": container with ID starting with 161b52c8266a4dcfef56e10381ca93a99b88f2762d394670a7b8151112df21dd not found: ID does not exist" containerID="161b52c8266a4dcfef56e10381ca93a99b88f2762d394670a7b8151112df21dd" Nov 24 08:43:02 crc kubenswrapper[4718]: I1124 08:43:02.640345 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"161b52c8266a4dcfef56e10381ca93a99b88f2762d394670a7b8151112df21dd"} err="failed to get container status \"161b52c8266a4dcfef56e10381ca93a99b88f2762d394670a7b8151112df21dd\": rpc error: code = NotFound desc = could not find container \"161b52c8266a4dcfef56e10381ca93a99b88f2762d394670a7b8151112df21dd\": container with ID starting with 161b52c8266a4dcfef56e10381ca93a99b88f2762d394670a7b8151112df21dd not found: ID does not exist" Nov 24 08:43:04 crc kubenswrapper[4718]: I1124 08:43:04.602109 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fbd4dfd-1303-47a7-a2c9-3a093f9103cb" path="/var/lib/kubelet/pods/9fbd4dfd-1303-47a7-a2c9-3a093f9103cb/volumes" Nov 24 08:44:22 crc kubenswrapper[4718]: I1124 08:44:22.044900 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:44:22 crc kubenswrapper[4718]: I1124 08:44:22.045686 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:44:52 crc kubenswrapper[4718]: I1124 08:44:52.045087 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 
24 08:44:52 crc kubenswrapper[4718]: I1124 08:44:52.045803 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.134918 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq"] Nov 24 08:45:00 crc kubenswrapper[4718]: E1124 08:45:00.135686 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fbd4dfd-1303-47a7-a2c9-3a093f9103cb" containerName="registry" Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.135702 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fbd4dfd-1303-47a7-a2c9-3a093f9103cb" containerName="registry" Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.135812 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fbd4dfd-1303-47a7-a2c9-3a093f9103cb" containerName="registry" Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.136301 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq" Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.138321 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.138416 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.145848 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq"] Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.243811 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5d2be04f-d630-4050-93f6-5110c1e9878e-config-volume\") pod \"collect-profiles-29399565-97wdq\" (UID: \"5d2be04f-d630-4050-93f6-5110c1e9878e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq" Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.243959 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5d2be04f-d630-4050-93f6-5110c1e9878e-secret-volume\") pod \"collect-profiles-29399565-97wdq\" (UID: \"5d2be04f-d630-4050-93f6-5110c1e9878e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq" Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.244039 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjzkq\" (UniqueName: \"kubernetes.io/projected/5d2be04f-d630-4050-93f6-5110c1e9878e-kube-api-access-xjzkq\") pod \"collect-profiles-29399565-97wdq\" (UID: \"5d2be04f-d630-4050-93f6-5110c1e9878e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq" Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.344527 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjzkq\" (UniqueName: 
\"kubernetes.io/projected/5d2be04f-d630-4050-93f6-5110c1e9878e-kube-api-access-xjzkq\") pod \"collect-profiles-29399565-97wdq\" (UID: \"5d2be04f-d630-4050-93f6-5110c1e9878e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq" Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.344598 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5d2be04f-d630-4050-93f6-5110c1e9878e-config-volume\") pod \"collect-profiles-29399565-97wdq\" (UID: \"5d2be04f-d630-4050-93f6-5110c1e9878e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq" Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.344680 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5d2be04f-d630-4050-93f6-5110c1e9878e-secret-volume\") pod \"collect-profiles-29399565-97wdq\" (UID: \"5d2be04f-d630-4050-93f6-5110c1e9878e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq" Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.346141 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5d2be04f-d630-4050-93f6-5110c1e9878e-config-volume\") pod \"collect-profiles-29399565-97wdq\" (UID: \"5d2be04f-d630-4050-93f6-5110c1e9878e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq" Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.350248 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5d2be04f-d630-4050-93f6-5110c1e9878e-secret-volume\") pod \"collect-profiles-29399565-97wdq\" (UID: \"5d2be04f-d630-4050-93f6-5110c1e9878e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq" Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.360211 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjzkq\" (UniqueName: \"kubernetes.io/projected/5d2be04f-d630-4050-93f6-5110c1e9878e-kube-api-access-xjzkq\") pod \"collect-profiles-29399565-97wdq\" (UID: \"5d2be04f-d630-4050-93f6-5110c1e9878e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq" Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.454193 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq" Nov 24 08:45:00 crc kubenswrapper[4718]: I1124 08:45:00.847177 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq"] Nov 24 08:45:01 crc kubenswrapper[4718]: I1124 08:45:01.203069 4718 generic.go:334] "Generic (PLEG): container finished" podID="5d2be04f-d630-4050-93f6-5110c1e9878e" containerID="ecac81a2d940483fd7914452ad8d7791dcaa66401d569aced9f42885a9a4003b" exitCode=0 Nov 24 08:45:01 crc kubenswrapper[4718]: I1124 08:45:01.203123 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq" event={"ID":"5d2be04f-d630-4050-93f6-5110c1e9878e","Type":"ContainerDied","Data":"ecac81a2d940483fd7914452ad8d7791dcaa66401d569aced9f42885a9a4003b"} Nov 24 08:45:01 crc kubenswrapper[4718]: I1124 08:45:01.203151 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq" event={"ID":"5d2be04f-d630-4050-93f6-5110c1e9878e","Type":"ContainerStarted","Data":"72ecfb621bb51227615b7cb8796e0731e6dd09fd348d0ac1478dde2a0c312910"} Nov 24 08:45:02 crc kubenswrapper[4718]: I1124 08:45:02.432052 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq" Nov 24 08:45:02 crc kubenswrapper[4718]: I1124 08:45:02.471422 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5d2be04f-d630-4050-93f6-5110c1e9878e-config-volume\") pod \"5d2be04f-d630-4050-93f6-5110c1e9878e\" (UID: \"5d2be04f-d630-4050-93f6-5110c1e9878e\") " Nov 24 08:45:02 crc kubenswrapper[4718]: I1124 08:45:02.471492 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjzkq\" (UniqueName: \"kubernetes.io/projected/5d2be04f-d630-4050-93f6-5110c1e9878e-kube-api-access-xjzkq\") pod \"5d2be04f-d630-4050-93f6-5110c1e9878e\" (UID: \"5d2be04f-d630-4050-93f6-5110c1e9878e\") " Nov 24 08:45:02 crc kubenswrapper[4718]: I1124 08:45:02.472268 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d2be04f-d630-4050-93f6-5110c1e9878e-config-volume" (OuterVolumeSpecName: "config-volume") pod "5d2be04f-d630-4050-93f6-5110c1e9878e" (UID: "5d2be04f-d630-4050-93f6-5110c1e9878e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:45:02 crc kubenswrapper[4718]: I1124 08:45:02.476212 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d2be04f-d630-4050-93f6-5110c1e9878e-kube-api-access-xjzkq" (OuterVolumeSpecName: "kube-api-access-xjzkq") pod "5d2be04f-d630-4050-93f6-5110c1e9878e" (UID: "5d2be04f-d630-4050-93f6-5110c1e9878e"). InnerVolumeSpecName "kube-api-access-xjzkq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:45:02 crc kubenswrapper[4718]: I1124 08:45:02.572413 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5d2be04f-d630-4050-93f6-5110c1e9878e-secret-volume\") pod \"5d2be04f-d630-4050-93f6-5110c1e9878e\" (UID: \"5d2be04f-d630-4050-93f6-5110c1e9878e\") " Nov 24 08:45:02 crc kubenswrapper[4718]: I1124 08:45:02.572668 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjzkq\" (UniqueName: \"kubernetes.io/projected/5d2be04f-d630-4050-93f6-5110c1e9878e-kube-api-access-xjzkq\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:02 crc kubenswrapper[4718]: I1124 08:45:02.572680 4718 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5d2be04f-d630-4050-93f6-5110c1e9878e-config-volume\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:02 crc kubenswrapper[4718]: I1124 08:45:02.575649 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d2be04f-d630-4050-93f6-5110c1e9878e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5d2be04f-d630-4050-93f6-5110c1e9878e" (UID: "5d2be04f-d630-4050-93f6-5110c1e9878e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:45:02 crc kubenswrapper[4718]: I1124 08:45:02.673467 4718 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5d2be04f-d630-4050-93f6-5110c1e9878e-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:03 crc kubenswrapper[4718]: I1124 08:45:03.220304 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq" event={"ID":"5d2be04f-d630-4050-93f6-5110c1e9878e","Type":"ContainerDied","Data":"72ecfb621bb51227615b7cb8796e0731e6dd09fd348d0ac1478dde2a0c312910"} Nov 24 08:45:03 crc kubenswrapper[4718]: I1124 08:45:03.220370 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72ecfb621bb51227615b7cb8796e0731e6dd09fd348d0ac1478dde2a0c312910" Nov 24 08:45:03 crc kubenswrapper[4718]: I1124 08:45:03.220335 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399565-97wdq" Nov 24 08:45:19 crc kubenswrapper[4718]: I1124 08:45:19.797316 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2lk4b"] Nov 24 08:45:19 crc kubenswrapper[4718]: I1124 08:45:19.798191 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovn-controller" containerID="cri-o://84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916" gracePeriod=30 Nov 24 08:45:19 crc kubenswrapper[4718]: I1124 08:45:19.798530 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="sbdb" containerID="cri-o://b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0" gracePeriod=30 Nov 24 08:45:19 crc kubenswrapper[4718]: I1124 08:45:19.798576 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="nbdb" containerID="cri-o://a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035" gracePeriod=30 Nov 24 08:45:19 crc kubenswrapper[4718]: I1124 08:45:19.798623 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="northd" containerID="cri-o://f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa" gracePeriod=30 Nov 24 08:45:19 crc kubenswrapper[4718]: I1124 08:45:19.798661 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0" gracePeriod=30 Nov 24 08:45:19 crc kubenswrapper[4718]: I1124 08:45:19.798738 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="kube-rbac-proxy-node" containerID="cri-o://9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96" gracePeriod=30 Nov 24 08:45:19 crc kubenswrapper[4718]: I1124 08:45:19.798788 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovn-acl-logging" containerID="cri-o://8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d" gracePeriod=30 Nov 24 08:45:19 crc kubenswrapper[4718]: I1124 08:45:19.839552 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovnkube-controller" containerID="cri-o://bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9" gracePeriod=30 Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.034370 4718 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035 is running failed: container process not found" 
containerID="a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.034851 4718 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0 is running failed: container process not found" containerID="b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.035038 4718 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035 is running failed: container process not found" containerID="a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.035279 4718 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035 is running failed: container process not found" containerID="a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.035319 4718 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035 is running failed: container process not found" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="nbdb" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.035286 4718 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0 is running failed: container process not found" containerID="b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.035608 4718 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0 is running failed: container process not found" containerID="b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0" cmd=["/bin/bash","-c","set -xeo pipefail\n. 
/ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.035641 4718 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0 is running failed: container process not found" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="sbdb" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.072161 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovnkube-controller/3.log" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.075279 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovn-acl-logging/0.log" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.075803 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovn-controller/0.log" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.076379 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.084752 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovnkube-script-lib\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.084815 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-ovn\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.084857 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-cni-netd\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.084878 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-log-socket\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.084901 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-kubelet\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.084915 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.084931 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovnkube-config\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.084958 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-log-socket" (OuterVolumeSpecName: "log-socket") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.084960 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-run-netns\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085000 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-cni-bin\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085033 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovn-node-metrics-cert\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.084956 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085007 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085035 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085068 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-openvswitch\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085097 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085111 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-var-lib-cni-networks-ovn-kubernetes\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085126 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085138 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-slash\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085162 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-etc-openvswitch\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085184 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-env-overrides\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085212 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tq976\" (UniqueName: \"kubernetes.io/projected/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-kube-api-access-tq976\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085264 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-systemd\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085291 4718 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-var-lib-openvswitch\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085321 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-node-log\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085332 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085339 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-run-ovn-kubernetes\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085363 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085380 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-systemd-units\") pod \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\" (UID: \"692d15f5-2875-47c6-92e3-3c99bfd6b7ea\") " Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085369 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085487 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085378 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-slash" (OuterVolumeSpecName: "host-slash") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). 
InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085405 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-node-log" (OuterVolumeSpecName: "node-log") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085428 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085453 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085858 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085883 4718 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085911 4718 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-node-log\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085931 4718 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-systemd-units\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085948 4718 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.085987 4718 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-cni-netd\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.086004 4718 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-log-socket\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.086020 4718 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-kubelet\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.086037 4718 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.086054 4718 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-run-netns\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.086071 4718 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-cni-bin\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.086087 4718 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.086104 4718 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.086123 4718 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-host-slash\") on node \"crc\" DevicePath 
\"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.086142 4718 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.086161 4718 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.086511 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.090627 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-kube-api-access-tq976" (OuterVolumeSpecName: "kube-api-access-tq976") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "kube-api-access-tq976". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.090711 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.098633 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "692d15f5-2875-47c6-92e3-3c99bfd6b7ea" (UID: "692d15f5-2875-47c6-92e3-3c99bfd6b7ea"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.126870 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mwbgr"] Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.127845 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="kube-rbac-proxy-node" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.127872 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="kube-rbac-proxy-node" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.127888 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d2be04f-d630-4050-93f6-5110c1e9878e" containerName="collect-profiles" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.127897 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d2be04f-d630-4050-93f6-5110c1e9878e" containerName="collect-profiles" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.127909 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovn-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.127918 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovn-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.127933 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="nbdb" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.127941 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="nbdb" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.127957 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="sbdb" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.127985 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="sbdb" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.127998 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovnkube-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128006 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovnkube-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.128017 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="kube-rbac-proxy-ovn-metrics" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128025 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="kube-rbac-proxy-ovn-metrics" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.128035 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovnkube-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128043 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovnkube-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.128052 4718 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovnkube-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128059 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovnkube-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.128072 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovnkube-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128080 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovnkube-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.128092 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovn-acl-logging" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128100 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovn-acl-logging" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.128115 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="kubecfg-setup" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128123 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="kubecfg-setup" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.128135 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="northd" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128143 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="northd" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128295 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="northd" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128310 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovn-acl-logging" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128321 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovnkube-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128332 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="nbdb" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128343 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovnkube-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128351 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovnkube-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128363 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d2be04f-d630-4050-93f6-5110c1e9878e" containerName="collect-profiles" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128373 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovn-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128385 4718 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="kube-rbac-proxy-node" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128397 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="kube-rbac-proxy-ovn-metrics" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128407 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="sbdb" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.128524 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovnkube-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128534 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovnkube-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128642 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovnkube-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.128850 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerName="ovnkube-controller" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.131115 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.186656 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-run-systemd\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.186712 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.186738 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-kubelet\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.186762 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-var-lib-openvswitch\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.186783 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e42afd35-3a21-4488-bf70-25bc69f6abdd-ovnkube-config\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.186844 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-slash\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.186875 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-etc-openvswitch\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.186934 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-node-log\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.186956 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-cni-netd\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.187019 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-run-ovn-kubernetes\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.187043 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e42afd35-3a21-4488-bf70-25bc69f6abdd-ovnkube-script-lib\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.187266 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-cni-bin\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.187301 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e42afd35-3a21-4488-bf70-25bc69f6abdd-ovn-node-metrics-cert\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.187329 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxrms\" (UniqueName: 
\"kubernetes.io/projected/e42afd35-3a21-4488-bf70-25bc69f6abdd-kube-api-access-sxrms\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.187354 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-run-openvswitch\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.187376 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-systemd-units\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.187402 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-log-socket\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.187424 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-run-ovn\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.187443 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e42afd35-3a21-4488-bf70-25bc69f6abdd-env-overrides\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.187465 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-run-netns\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.187509 4718 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.187525 4718 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.187539 4718 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.187552 4718 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-tq976\" (UniqueName: \"kubernetes.io/projected/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-kube-api-access-tq976\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.187564 4718 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/692d15f5-2875-47c6-92e3-3c99bfd6b7ea-run-systemd\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.288520 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-node-log\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.288646 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-cni-netd\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.288600 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-node-log\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.288672 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-run-ovn-kubernetes\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.288736 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e42afd35-3a21-4488-bf70-25bc69f6abdd-ovnkube-script-lib\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.288788 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-cni-netd\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.288788 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-run-ovn-kubernetes\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.288892 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-cni-bin\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.289465 4718 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e42afd35-3a21-4488-bf70-25bc69f6abdd-ovnkube-script-lib\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.289501 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-cni-bin\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.289535 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e42afd35-3a21-4488-bf70-25bc69f6abdd-ovn-node-metrics-cert\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.289559 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxrms\" (UniqueName: \"kubernetes.io/projected/e42afd35-3a21-4488-bf70-25bc69f6abdd-kube-api-access-sxrms\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290021 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-run-openvswitch\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290054 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-systemd-units\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290075 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-log-socket\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290081 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-run-openvswitch\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290112 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-run-ovn\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290090 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-run-ovn\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290132 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-systemd-units\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290132 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-log-socket\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290177 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e42afd35-3a21-4488-bf70-25bc69f6abdd-env-overrides\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290221 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-run-netns\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290249 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-run-systemd\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290274 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-kubelet\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290282 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-run-netns\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290297 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-run-systemd\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290298 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mwbgr\" (UID: 
\"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290324 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-kubelet\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290335 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-var-lib-openvswitch\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290362 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e42afd35-3a21-4488-bf70-25bc69f6abdd-ovnkube-config\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290360 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290394 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-var-lib-openvswitch\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290394 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-slash\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290421 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-host-slash\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290442 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-etc-openvswitch\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290504 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e42afd35-3a21-4488-bf70-25bc69f6abdd-etc-openvswitch\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290701 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e42afd35-3a21-4488-bf70-25bc69f6abdd-env-overrides\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.290848 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e42afd35-3a21-4488-bf70-25bc69f6abdd-ovnkube-config\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.292400 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e42afd35-3a21-4488-bf70-25bc69f6abdd-ovn-node-metrics-cert\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.299300 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zvlvh_811ba3ee-aad5-427c-84f7-fbd3b78255ec/kube-multus/2.log" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.299696 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zvlvh_811ba3ee-aad5-427c-84f7-fbd3b78255ec/kube-multus/1.log" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.299735 4718 generic.go:334] "Generic (PLEG): container finished" podID="811ba3ee-aad5-427c-84f7-fbd3b78255ec" containerID="d8ca5e7bf3e442fc225cdf16965d3a1960705e567187f8aae5a8e47e781c4a3c" exitCode=2 Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.299786 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zvlvh" event={"ID":"811ba3ee-aad5-427c-84f7-fbd3b78255ec","Type":"ContainerDied","Data":"d8ca5e7bf3e442fc225cdf16965d3a1960705e567187f8aae5a8e47e781c4a3c"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.299824 4718 scope.go:117] "RemoveContainer" containerID="308a885775330d5e1550c9c740909b7bb1bbff451fa1d5b196af6263de715424" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.300285 4718 scope.go:117] "RemoveContainer" containerID="d8ca5e7bf3e442fc225cdf16965d3a1960705e567187f8aae5a8e47e781c4a3c" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.300442 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-zvlvh_openshift-multus(811ba3ee-aad5-427c-84f7-fbd3b78255ec)\"" pod="openshift-multus/multus-zvlvh" podUID="811ba3ee-aad5-427c-84f7-fbd3b78255ec" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.303205 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovnkube-controller/3.log" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.304617 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxrms\" (UniqueName: \"kubernetes.io/projected/e42afd35-3a21-4488-bf70-25bc69f6abdd-kube-api-access-sxrms\") pod \"ovnkube-node-mwbgr\" (UID: \"e42afd35-3a21-4488-bf70-25bc69f6abdd\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.306098 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovn-acl-logging/0.log" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.307007 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lk4b_692d15f5-2875-47c6-92e3-3c99bfd6b7ea/ovn-controller/0.log" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.312829 4718 generic.go:334] "Generic (PLEG): container finished" podID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerID="bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9" exitCode=0 Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.312860 4718 generic.go:334] "Generic (PLEG): container finished" podID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerID="b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0" exitCode=0 Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.312871 4718 generic.go:334] "Generic (PLEG): container finished" podID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerID="a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035" exitCode=0 Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.312884 4718 generic.go:334] "Generic (PLEG): container finished" podID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerID="f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa" exitCode=0 Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.312897 4718 generic.go:334] "Generic (PLEG): container finished" podID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerID="929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0" exitCode=0 Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.312906 4718 generic.go:334] "Generic (PLEG): container finished" podID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerID="9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96" exitCode=0 Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.312915 4718 generic.go:334] "Generic (PLEG): container finished" podID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerID="8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d" exitCode=143 Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.312925 4718 generic.go:334] "Generic (PLEG): container finished" podID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" containerID="84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916" exitCode=143 Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.312905 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerDied","Data":"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.312942 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.312966 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerDied","Data":"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313017 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerDied","Data":"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313032 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerDied","Data":"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313045 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerDied","Data":"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313058 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerDied","Data":"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313077 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313091 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313098 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313106 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313113 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313120 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313127 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313135 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313141 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313149 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313158 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerDied","Data":"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313169 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313177 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313185 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313194 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313200 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313208 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313215 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313222 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313231 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313238 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313248 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerDied","Data":"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313259 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313269 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313279 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313289 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313298 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313308 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313318 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313328 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313337 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313346 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313359 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lk4b" event={"ID":"692d15f5-2875-47c6-92e3-3c99bfd6b7ea","Type":"ContainerDied","Data":"9bf2ed57b6656aba8f69321f2f6b77c74f953e9d49a2a7dd16a4a3af519e8156"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313373 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313384 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 
08:45:20.313394 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313403 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313412 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313443 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313451 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313458 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313467 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.313474 4718 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632"} Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.331924 4718 scope.go:117] "RemoveContainer" containerID="bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.344854 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2lk4b"] Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.347109 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2lk4b"] Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.352238 4718 scope.go:117] "RemoveContainer" containerID="14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.377919 4718 scope.go:117] "RemoveContainer" containerID="b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.392458 4718 scope.go:117] "RemoveContainer" containerID="a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.403773 4718 scope.go:117] "RemoveContainer" containerID="f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.414270 4718 scope.go:117] "RemoveContainer" containerID="929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.424665 4718 scope.go:117] "RemoveContainer" containerID="9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96" Nov 24 08:45:20 
crc kubenswrapper[4718]: I1124 08:45:20.435765 4718 scope.go:117] "RemoveContainer" containerID="8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.444814 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.446378 4718 scope.go:117] "RemoveContainer" containerID="84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.457315 4718 scope.go:117] "RemoveContainer" containerID="5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.472209 4718 scope.go:117] "RemoveContainer" containerID="bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.472570 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9\": container with ID starting with bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9 not found: ID does not exist" containerID="bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.472604 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9"} err="failed to get container status \"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9\": rpc error: code = NotFound desc = could not find container \"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9\": container with ID starting with bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.472624 4718 scope.go:117] "RemoveContainer" containerID="14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.472879 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622\": container with ID starting with 14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622 not found: ID does not exist" containerID="14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.472903 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622"} err="failed to get container status \"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622\": rpc error: code = NotFound desc = could not find container \"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622\": container with ID starting with 14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.472919 4718 scope.go:117] "RemoveContainer" containerID="b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.473441 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\": container with ID starting with b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0 not found: ID does not exist" containerID="b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.473466 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0"} err="failed to get container status \"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\": rpc error: code = NotFound desc = could not find container \"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\": container with ID starting with b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.473485 4718 scope.go:117] "RemoveContainer" containerID="a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.473747 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\": container with ID starting with a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035 not found: ID does not exist" containerID="a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.473768 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035"} err="failed to get container status \"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\": rpc error: code = NotFound desc = could not find container \"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\": container with ID starting with a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.473784 4718 scope.go:117] "RemoveContainer" containerID="f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.474045 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\": container with ID starting with f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa not found: ID does not exist" containerID="f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.474071 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa"} err="failed to get container status \"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\": rpc error: code = NotFound desc = could not find container \"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\": container with ID starting with f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.474086 4718 scope.go:117] "RemoveContainer" containerID="929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0" Nov 24 08:45:20 crc 
kubenswrapper[4718]: E1124 08:45:20.474337 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\": container with ID starting with 929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0 not found: ID does not exist" containerID="929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.474357 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0"} err="failed to get container status \"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\": rpc error: code = NotFound desc = could not find container \"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\": container with ID starting with 929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.474373 4718 scope.go:117] "RemoveContainer" containerID="9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.474606 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\": container with ID starting with 9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96 not found: ID does not exist" containerID="9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.474629 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96"} err="failed to get container status \"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\": rpc error: code = NotFound desc = could not find container \"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\": container with ID starting with 9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.474645 4718 scope.go:117] "RemoveContainer" containerID="8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.474862 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\": container with ID starting with 8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d not found: ID does not exist" containerID="8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.474885 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d"} err="failed to get container status \"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\": rpc error: code = NotFound desc = could not find container \"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\": container with ID starting with 8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: 
I1124 08:45:20.474897 4718 scope.go:117] "RemoveContainer" containerID="84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.475104 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\": container with ID starting with 84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916 not found: ID does not exist" containerID="84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.475128 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916"} err="failed to get container status \"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\": rpc error: code = NotFound desc = could not find container \"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\": container with ID starting with 84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.475143 4718 scope.go:117] "RemoveContainer" containerID="5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632" Nov 24 08:45:20 crc kubenswrapper[4718]: E1124 08:45:20.475484 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\": container with ID starting with 5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632 not found: ID does not exist" containerID="5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.475507 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632"} err="failed to get container status \"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\": rpc error: code = NotFound desc = could not find container \"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\": container with ID starting with 5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.475524 4718 scope.go:117] "RemoveContainer" containerID="bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.475752 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9"} err="failed to get container status \"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9\": rpc error: code = NotFound desc = could not find container \"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9\": container with ID starting with bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.475770 4718 scope.go:117] "RemoveContainer" containerID="14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.476009 4718 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622"} err="failed to get container status \"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622\": rpc error: code = NotFound desc = could not find container \"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622\": container with ID starting with 14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.476076 4718 scope.go:117] "RemoveContainer" containerID="b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.476391 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0"} err="failed to get container status \"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\": rpc error: code = NotFound desc = could not find container \"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\": container with ID starting with b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.476412 4718 scope.go:117] "RemoveContainer" containerID="a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.476667 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035"} err="failed to get container status \"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\": rpc error: code = NotFound desc = could not find container \"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\": container with ID starting with a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.476684 4718 scope.go:117] "RemoveContainer" containerID="f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.476887 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa"} err="failed to get container status \"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\": rpc error: code = NotFound desc = could not find container \"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\": container with ID starting with f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.476902 4718 scope.go:117] "RemoveContainer" containerID="929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.477174 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0"} err="failed to get container status \"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\": rpc error: code = NotFound desc = could not find container \"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\": container with ID starting with 929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0 not found: ID does not exist" Nov 
24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.477196 4718 scope.go:117] "RemoveContainer" containerID="9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.477450 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96"} err="failed to get container status \"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\": rpc error: code = NotFound desc = could not find container \"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\": container with ID starting with 9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.477472 4718 scope.go:117] "RemoveContainer" containerID="8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.477711 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d"} err="failed to get container status \"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\": rpc error: code = NotFound desc = could not find container \"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\": container with ID starting with 8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.477732 4718 scope.go:117] "RemoveContainer" containerID="84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.478084 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916"} err="failed to get container status \"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\": rpc error: code = NotFound desc = could not find container \"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\": container with ID starting with 84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.478101 4718 scope.go:117] "RemoveContainer" containerID="5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.478307 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632"} err="failed to get container status \"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\": rpc error: code = NotFound desc = could not find container \"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\": container with ID starting with 5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.478323 4718 scope.go:117] "RemoveContainer" containerID="bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.478523 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9"} err="failed to get container status 
\"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9\": rpc error: code = NotFound desc = could not find container \"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9\": container with ID starting with bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.478539 4718 scope.go:117] "RemoveContainer" containerID="14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.478756 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622"} err="failed to get container status \"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622\": rpc error: code = NotFound desc = could not find container \"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622\": container with ID starting with 14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.478774 4718 scope.go:117] "RemoveContainer" containerID="b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.479037 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0"} err="failed to get container status \"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\": rpc error: code = NotFound desc = could not find container \"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\": container with ID starting with b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.479056 4718 scope.go:117] "RemoveContainer" containerID="a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.479241 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035"} err="failed to get container status \"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\": rpc error: code = NotFound desc = could not find container \"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\": container with ID starting with a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.479255 4718 scope.go:117] "RemoveContainer" containerID="f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.479587 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa"} err="failed to get container status \"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\": rpc error: code = NotFound desc = could not find container \"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\": container with ID starting with f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.479607 4718 scope.go:117] "RemoveContainer" 
containerID="929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.479788 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0"} err="failed to get container status \"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\": rpc error: code = NotFound desc = could not find container \"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\": container with ID starting with 929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.479810 4718 scope.go:117] "RemoveContainer" containerID="9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.480125 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96"} err="failed to get container status \"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\": rpc error: code = NotFound desc = could not find container \"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\": container with ID starting with 9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.480144 4718 scope.go:117] "RemoveContainer" containerID="8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.480326 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d"} err="failed to get container status \"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\": rpc error: code = NotFound desc = could not find container \"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\": container with ID starting with 8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.480343 4718 scope.go:117] "RemoveContainer" containerID="84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.480554 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916"} err="failed to get container status \"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\": rpc error: code = NotFound desc = could not find container \"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\": container with ID starting with 84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.480574 4718 scope.go:117] "RemoveContainer" containerID="5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.480808 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632"} err="failed to get container status \"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\": rpc error: code = NotFound desc = could not find 
container \"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\": container with ID starting with 5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.480825 4718 scope.go:117] "RemoveContainer" containerID="bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.481032 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9"} err="failed to get container status \"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9\": rpc error: code = NotFound desc = could not find container \"bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9\": container with ID starting with bb2483a37a7ac8092a6c0f543568c2679f7901efa184b82e4785fc7d1d282de9 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.481055 4718 scope.go:117] "RemoveContainer" containerID="14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.481282 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622"} err="failed to get container status \"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622\": rpc error: code = NotFound desc = could not find container \"14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622\": container with ID starting with 14047c6f6c5b355ed402b7d3c9bf7b3aea877205a1bd1f8bed40383ae152b622 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.481300 4718 scope.go:117] "RemoveContainer" containerID="b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.481488 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0"} err="failed to get container status \"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\": rpc error: code = NotFound desc = could not find container \"b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0\": container with ID starting with b65461e83a91b757a6b2351296a450159fa2880a8e6c512c5a8f0ee58a5ff1c0 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.481509 4718 scope.go:117] "RemoveContainer" containerID="a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.481809 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035"} err="failed to get container status \"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\": rpc error: code = NotFound desc = could not find container \"a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035\": container with ID starting with a87ce5be1ca010d0a39e1e3ff89fe097c1af50cce35888f019daa047c3f54035 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.481825 4718 scope.go:117] "RemoveContainer" containerID="f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.482019 4718 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa"} err="failed to get container status \"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\": rpc error: code = NotFound desc = could not find container \"f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa\": container with ID starting with f8a707d2b212fa0aecc0b9148b00acfb91e89b836ae1c32b98d317373426d0fa not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.482041 4718 scope.go:117] "RemoveContainer" containerID="929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.482322 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0"} err="failed to get container status \"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\": rpc error: code = NotFound desc = could not find container \"929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0\": container with ID starting with 929c588509d6d19ab89d2d73882f695a34298c735ce984386240b8d40752b9f0 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.482346 4718 scope.go:117] "RemoveContainer" containerID="9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.482604 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96"} err="failed to get container status \"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\": rpc error: code = NotFound desc = could not find container \"9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96\": container with ID starting with 9038e72e681c4e7bb3d53ae07d6a557f69a3b58d9a0400095b78b7f74d1d0a96 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.482629 4718 scope.go:117] "RemoveContainer" containerID="8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.482877 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d"} err="failed to get container status \"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\": rpc error: code = NotFound desc = could not find container \"8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d\": container with ID starting with 8b0f8e36d36d8f2849f4a1fd7d3b9c75e7f3f4bffe21b8d30dd1259cdb02ec0d not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.482899 4718 scope.go:117] "RemoveContainer" containerID="84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.483142 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916"} err="failed to get container status \"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\": rpc error: code = NotFound desc = could not find container \"84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916\": container with ID starting with 
84910b4dbaec2e00aed8c9a5711769eca0baf6bdea68f610acf303fdd8eb5916 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.483162 4718 scope.go:117] "RemoveContainer" containerID="5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.483400 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632"} err="failed to get container status \"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\": rpc error: code = NotFound desc = could not find container \"5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632\": container with ID starting with 5a0e4ada0970d612daf16bb922bfd6922ed271bdefb19077ae96c159efe63632 not found: ID does not exist" Nov 24 08:45:20 crc kubenswrapper[4718]: I1124 08:45:20.607858 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="692d15f5-2875-47c6-92e3-3c99bfd6b7ea" path="/var/lib/kubelet/pods/692d15f5-2875-47c6-92e3-3c99bfd6b7ea/volumes" Nov 24 08:45:21 crc kubenswrapper[4718]: I1124 08:45:21.320775 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zvlvh_811ba3ee-aad5-427c-84f7-fbd3b78255ec/kube-multus/2.log" Nov 24 08:45:21 crc kubenswrapper[4718]: I1124 08:45:21.324162 4718 generic.go:334] "Generic (PLEG): container finished" podID="e42afd35-3a21-4488-bf70-25bc69f6abdd" containerID="f0fc494c968c691eb23952ee43ad800bd55dee91025d7b12017a93745b8088cb" exitCode=0 Nov 24 08:45:21 crc kubenswrapper[4718]: I1124 08:45:21.324246 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" event={"ID":"e42afd35-3a21-4488-bf70-25bc69f6abdd","Type":"ContainerDied","Data":"f0fc494c968c691eb23952ee43ad800bd55dee91025d7b12017a93745b8088cb"} Nov 24 08:45:21 crc kubenswrapper[4718]: I1124 08:45:21.324431 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" event={"ID":"e42afd35-3a21-4488-bf70-25bc69f6abdd","Type":"ContainerStarted","Data":"880700cd87cbb98b5d0d7327fbce27684c1058479fe2afc2898f144e319313cf"} Nov 24 08:45:22 crc kubenswrapper[4718]: I1124 08:45:22.044685 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:45:22 crc kubenswrapper[4718]: I1124 08:45:22.045311 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:45:22 crc kubenswrapper[4718]: I1124 08:45:22.045359 4718 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:45:22 crc kubenswrapper[4718]: I1124 08:45:22.045955 4718 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"873c6f9762288e9dfb0f0664bf7a56f9f72a8fa6abf831277ce0db85d93a114f"} pod="openshift-machine-config-operator/machine-config-daemon-575gl" containerMessage="Container 
machine-config-daemon failed liveness probe, will be restarted" Nov 24 08:45:22 crc kubenswrapper[4718]: I1124 08:45:22.046038 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" containerID="cri-o://873c6f9762288e9dfb0f0664bf7a56f9f72a8fa6abf831277ce0db85d93a114f" gracePeriod=600 Nov 24 08:45:22 crc kubenswrapper[4718]: I1124 08:45:22.332113 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" event={"ID":"e42afd35-3a21-4488-bf70-25bc69f6abdd","Type":"ContainerStarted","Data":"e7ac9a433b9bda2c0434db185dc0b4a701840fa8e8b41dd8a02d706382d78902"} Nov 24 08:45:22 crc kubenswrapper[4718]: I1124 08:45:22.332165 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" event={"ID":"e42afd35-3a21-4488-bf70-25bc69f6abdd","Type":"ContainerStarted","Data":"9374d4bfcd81d27e81d1682079e246ee131a434764c758cf7dd71b687ffad351"} Nov 24 08:45:22 crc kubenswrapper[4718]: I1124 08:45:22.332178 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" event={"ID":"e42afd35-3a21-4488-bf70-25bc69f6abdd","Type":"ContainerStarted","Data":"25ed6cc1e307da2cd20ac1934b85f12b19240e7c6c6edc116fa236a3bee4e218"} Nov 24 08:45:22 crc kubenswrapper[4718]: I1124 08:45:22.332299 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" event={"ID":"e42afd35-3a21-4488-bf70-25bc69f6abdd","Type":"ContainerStarted","Data":"12286b1bc2949fa3b4e6b2985b4ce15387913b21e75abc021acb8909f3e13aba"} Nov 24 08:45:22 crc kubenswrapper[4718]: I1124 08:45:22.332337 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" event={"ID":"e42afd35-3a21-4488-bf70-25bc69f6abdd","Type":"ContainerStarted","Data":"ced6d5bc4447a6747f8d3db27c5844d6ddacf39a12b0b0eaccf137cbcc59b4e4"} Nov 24 08:45:22 crc kubenswrapper[4718]: I1124 08:45:22.332350 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" event={"ID":"e42afd35-3a21-4488-bf70-25bc69f6abdd","Type":"ContainerStarted","Data":"8196b17644cb348ebee6d9a13a9e3fc3111fe151c969e9ae78a57c9755754470"} Nov 24 08:45:22 crc kubenswrapper[4718]: I1124 08:45:22.335165 4718 generic.go:334] "Generic (PLEG): container finished" podID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerID="873c6f9762288e9dfb0f0664bf7a56f9f72a8fa6abf831277ce0db85d93a114f" exitCode=0 Nov 24 08:45:22 crc kubenswrapper[4718]: I1124 08:45:22.335221 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerDied","Data":"873c6f9762288e9dfb0f0664bf7a56f9f72a8fa6abf831277ce0db85d93a114f"} Nov 24 08:45:22 crc kubenswrapper[4718]: I1124 08:45:22.335310 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerStarted","Data":"617141ef091b02db6eb1b54328e03850e47f6f2d095a10a3726c1cd67c78f520"} Nov 24 08:45:22 crc kubenswrapper[4718]: I1124 08:45:22.335344 4718 scope.go:117] "RemoveContainer" containerID="3a7ca29b97a51852d552c1a2d7e2bdb50bf9e50e07b800355266295362166198" Nov 24 08:45:24 crc kubenswrapper[4718]: I1124 08:45:24.351197 4718 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" event={"ID":"e42afd35-3a21-4488-bf70-25bc69f6abdd","Type":"ContainerStarted","Data":"3949de196adc56849560223366c9551d71c4eeb56e4f0456688681e7eda914a2"} Nov 24 08:45:27 crc kubenswrapper[4718]: I1124 08:45:27.370296 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" event={"ID":"e42afd35-3a21-4488-bf70-25bc69f6abdd","Type":"ContainerStarted","Data":"b8a697afa573c69fdc4b24ba90f50f367a3335fffd85102c84a1f0b15d573352"} Nov 24 08:45:27 crc kubenswrapper[4718]: I1124 08:45:27.370858 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:27 crc kubenswrapper[4718]: I1124 08:45:27.408466 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" podStartSLOduration=7.408451011 podStartE2EDuration="7.408451011s" podCreationTimestamp="2025-11-24 08:45:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:45:27.408206365 +0000 UTC m=+599.524497279" watchObservedRunningTime="2025-11-24 08:45:27.408451011 +0000 UTC m=+599.524741915" Nov 24 08:45:27 crc kubenswrapper[4718]: I1124 08:45:27.416231 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:28 crc kubenswrapper[4718]: I1124 08:45:28.375246 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:28 crc kubenswrapper[4718]: I1124 08:45:28.375601 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:28 crc kubenswrapper[4718]: I1124 08:45:28.400604 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:31 crc kubenswrapper[4718]: I1124 08:45:31.597125 4718 scope.go:117] "RemoveContainer" containerID="d8ca5e7bf3e442fc225cdf16965d3a1960705e567187f8aae5a8e47e781c4a3c" Nov 24 08:45:31 crc kubenswrapper[4718]: E1124 08:45:31.598100 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-zvlvh_openshift-multus(811ba3ee-aad5-427c-84f7-fbd3b78255ec)\"" pod="openshift-multus/multus-zvlvh" podUID="811ba3ee-aad5-427c-84f7-fbd3b78255ec" Nov 24 08:45:42 crc kubenswrapper[4718]: I1124 08:45:42.596623 4718 scope.go:117] "RemoveContainer" containerID="d8ca5e7bf3e442fc225cdf16965d3a1960705e567187f8aae5a8e47e781c4a3c" Nov 24 08:45:43 crc kubenswrapper[4718]: I1124 08:45:43.260815 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw"] Nov 24 08:45:43 crc kubenswrapper[4718]: I1124 08:45:43.262241 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" Nov 24 08:45:43 crc kubenswrapper[4718]: I1124 08:45:43.263717 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 24 08:45:43 crc kubenswrapper[4718]: I1124 08:45:43.268929 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw"] Nov 24 08:45:43 crc kubenswrapper[4718]: I1124 08:45:43.291691 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5s2pf\" (UniqueName: \"kubernetes.io/projected/db65587c-3818-4123-ae2b-eb66c4cf3ddb-kube-api-access-5s2pf\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw\" (UID: \"db65587c-3818-4123-ae2b-eb66c4cf3ddb\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" Nov 24 08:45:43 crc kubenswrapper[4718]: I1124 08:45:43.291754 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/db65587c-3818-4123-ae2b-eb66c4cf3ddb-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw\" (UID: \"db65587c-3818-4123-ae2b-eb66c4cf3ddb\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" Nov 24 08:45:43 crc kubenswrapper[4718]: I1124 08:45:43.291857 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/db65587c-3818-4123-ae2b-eb66c4cf3ddb-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw\" (UID: \"db65587c-3818-4123-ae2b-eb66c4cf3ddb\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" Nov 24 08:45:43 crc kubenswrapper[4718]: I1124 08:45:43.392574 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/db65587c-3818-4123-ae2b-eb66c4cf3ddb-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw\" (UID: \"db65587c-3818-4123-ae2b-eb66c4cf3ddb\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" Nov 24 08:45:43 crc kubenswrapper[4718]: I1124 08:45:43.392635 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/db65587c-3818-4123-ae2b-eb66c4cf3ddb-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw\" (UID: \"db65587c-3818-4123-ae2b-eb66c4cf3ddb\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" Nov 24 08:45:43 crc kubenswrapper[4718]: I1124 08:45:43.392691 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5s2pf\" (UniqueName: \"kubernetes.io/projected/db65587c-3818-4123-ae2b-eb66c4cf3ddb-kube-api-access-5s2pf\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw\" (UID: \"db65587c-3818-4123-ae2b-eb66c4cf3ddb\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" Nov 24 08:45:43 crc kubenswrapper[4718]: I1124 08:45:43.393104 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/db65587c-3818-4123-ae2b-eb66c4cf3ddb-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw\" (UID: \"db65587c-3818-4123-ae2b-eb66c4cf3ddb\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" Nov 24 08:45:43 crc kubenswrapper[4718]: I1124 08:45:43.393122 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/db65587c-3818-4123-ae2b-eb66c4cf3ddb-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw\" (UID: \"db65587c-3818-4123-ae2b-eb66c4cf3ddb\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" Nov 24 08:45:43 crc kubenswrapper[4718]: I1124 08:45:43.412556 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5s2pf\" (UniqueName: \"kubernetes.io/projected/db65587c-3818-4123-ae2b-eb66c4cf3ddb-kube-api-access-5s2pf\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw\" (UID: \"db65587c-3818-4123-ae2b-eb66c4cf3ddb\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" Nov 24 08:45:43 crc kubenswrapper[4718]: I1124 08:45:43.457170 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zvlvh_811ba3ee-aad5-427c-84f7-fbd3b78255ec/kube-multus/2.log" Nov 24 08:45:43 crc kubenswrapper[4718]: I1124 08:45:43.457240 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zvlvh" event={"ID":"811ba3ee-aad5-427c-84f7-fbd3b78255ec","Type":"ContainerStarted","Data":"3e84432f4a93d7a2270378ae58936e8f14155667cd4671afd9ed5d70e5a1dafd"} Nov 24 08:45:43 crc kubenswrapper[4718]: I1124 08:45:43.578323 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" Nov 24 08:45:43 crc kubenswrapper[4718]: E1124 08:45:43.597512 4718 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw_openshift-marketplace_db65587c-3818-4123-ae2b-eb66c4cf3ddb_0(0725c82486c5bac6f0ebb26c7469ef2269abbf6cc8775300e875c260c9b4943a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 08:45:43 crc kubenswrapper[4718]: E1124 08:45:43.597775 4718 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw_openshift-marketplace_db65587c-3818-4123-ae2b-eb66c4cf3ddb_0(0725c82486c5bac6f0ebb26c7469ef2269abbf6cc8775300e875c260c9b4943a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" Nov 24 08:45:43 crc kubenswrapper[4718]: E1124 08:45:43.597805 4718 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw_openshift-marketplace_db65587c-3818-4123-ae2b-eb66c4cf3ddb_0(0725c82486c5bac6f0ebb26c7469ef2269abbf6cc8775300e875c260c9b4943a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" Nov 24 08:45:43 crc kubenswrapper[4718]: E1124 08:45:43.597879 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw_openshift-marketplace(db65587c-3818-4123-ae2b-eb66c4cf3ddb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw_openshift-marketplace(db65587c-3818-4123-ae2b-eb66c4cf3ddb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw_openshift-marketplace_db65587c-3818-4123-ae2b-eb66c4cf3ddb_0(0725c82486c5bac6f0ebb26c7469ef2269abbf6cc8775300e875c260c9b4943a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" podUID="db65587c-3818-4123-ae2b-eb66c4cf3ddb" Nov 24 08:45:44 crc kubenswrapper[4718]: I1124 08:45:44.462009 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" Nov 24 08:45:44 crc kubenswrapper[4718]: I1124 08:45:44.462557 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" Nov 24 08:45:44 crc kubenswrapper[4718]: I1124 08:45:44.615650 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw"] Nov 24 08:45:44 crc kubenswrapper[4718]: W1124 08:45:44.622583 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddb65587c_3818_4123_ae2b_eb66c4cf3ddb.slice/crio-9c11721903b4b7c9c3baeaba64186837fa579dbc48e52918d4268297242abfce WatchSource:0}: Error finding container 9c11721903b4b7c9c3baeaba64186837fa579dbc48e52918d4268297242abfce: Status 404 returned error can't find the container with id 9c11721903b4b7c9c3baeaba64186837fa579dbc48e52918d4268297242abfce Nov 24 08:45:45 crc kubenswrapper[4718]: I1124 08:45:45.468041 4718 generic.go:334] "Generic (PLEG): container finished" podID="db65587c-3818-4123-ae2b-eb66c4cf3ddb" containerID="2099160db98562acdd97eaef62935af16550edd79d269442b77b854e936f0756" exitCode=0 Nov 24 08:45:45 crc kubenswrapper[4718]: I1124 08:45:45.468087 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" event={"ID":"db65587c-3818-4123-ae2b-eb66c4cf3ddb","Type":"ContainerDied","Data":"2099160db98562acdd97eaef62935af16550edd79d269442b77b854e936f0756"} Nov 24 08:45:45 crc kubenswrapper[4718]: I1124 08:45:45.468113 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" event={"ID":"db65587c-3818-4123-ae2b-eb66c4cf3ddb","Type":"ContainerStarted","Data":"9c11721903b4b7c9c3baeaba64186837fa579dbc48e52918d4268297242abfce"} Nov 24 08:45:45 crc kubenswrapper[4718]: I1124 08:45:45.470202 4718 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 08:45:47 crc kubenswrapper[4718]: I1124 08:45:47.480914 4718 generic.go:334] "Generic (PLEG): container finished" 
podID="db65587c-3818-4123-ae2b-eb66c4cf3ddb" containerID="7dbeb84c6e645130bc59711b339e78bb9892c35fd79c963844953c7761024181" exitCode=0 Nov 24 08:45:47 crc kubenswrapper[4718]: I1124 08:45:47.481012 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" event={"ID":"db65587c-3818-4123-ae2b-eb66c4cf3ddb","Type":"ContainerDied","Data":"7dbeb84c6e645130bc59711b339e78bb9892c35fd79c963844953c7761024181"} Nov 24 08:45:48 crc kubenswrapper[4718]: I1124 08:45:48.490401 4718 generic.go:334] "Generic (PLEG): container finished" podID="db65587c-3818-4123-ae2b-eb66c4cf3ddb" containerID="cc08aa316ad0740e5f661ed8f0c53b8cd20ba1cda1b00d18333e32022adc00a3" exitCode=0 Nov 24 08:45:48 crc kubenswrapper[4718]: I1124 08:45:48.490890 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" event={"ID":"db65587c-3818-4123-ae2b-eb66c4cf3ddb","Type":"ContainerDied","Data":"cc08aa316ad0740e5f661ed8f0c53b8cd20ba1cda1b00d18333e32022adc00a3"} Nov 24 08:45:49 crc kubenswrapper[4718]: I1124 08:45:49.710568 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" Nov 24 08:45:49 crc kubenswrapper[4718]: I1124 08:45:49.875810 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/db65587c-3818-4123-ae2b-eb66c4cf3ddb-util\") pod \"db65587c-3818-4123-ae2b-eb66c4cf3ddb\" (UID: \"db65587c-3818-4123-ae2b-eb66c4cf3ddb\") " Nov 24 08:45:49 crc kubenswrapper[4718]: I1124 08:45:49.876156 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5s2pf\" (UniqueName: \"kubernetes.io/projected/db65587c-3818-4123-ae2b-eb66c4cf3ddb-kube-api-access-5s2pf\") pod \"db65587c-3818-4123-ae2b-eb66c4cf3ddb\" (UID: \"db65587c-3818-4123-ae2b-eb66c4cf3ddb\") " Nov 24 08:45:49 crc kubenswrapper[4718]: I1124 08:45:49.876197 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/db65587c-3818-4123-ae2b-eb66c4cf3ddb-bundle\") pod \"db65587c-3818-4123-ae2b-eb66c4cf3ddb\" (UID: \"db65587c-3818-4123-ae2b-eb66c4cf3ddb\") " Nov 24 08:45:49 crc kubenswrapper[4718]: I1124 08:45:49.877160 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db65587c-3818-4123-ae2b-eb66c4cf3ddb-bundle" (OuterVolumeSpecName: "bundle") pod "db65587c-3818-4123-ae2b-eb66c4cf3ddb" (UID: "db65587c-3818-4123-ae2b-eb66c4cf3ddb"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:45:49 crc kubenswrapper[4718]: I1124 08:45:49.883682 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db65587c-3818-4123-ae2b-eb66c4cf3ddb-kube-api-access-5s2pf" (OuterVolumeSpecName: "kube-api-access-5s2pf") pod "db65587c-3818-4123-ae2b-eb66c4cf3ddb" (UID: "db65587c-3818-4123-ae2b-eb66c4cf3ddb"). InnerVolumeSpecName "kube-api-access-5s2pf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:45:49 crc kubenswrapper[4718]: I1124 08:45:49.892215 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db65587c-3818-4123-ae2b-eb66c4cf3ddb-util" (OuterVolumeSpecName: "util") pod "db65587c-3818-4123-ae2b-eb66c4cf3ddb" (UID: "db65587c-3818-4123-ae2b-eb66c4cf3ddb"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:45:49 crc kubenswrapper[4718]: I1124 08:45:49.977828 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5s2pf\" (UniqueName: \"kubernetes.io/projected/db65587c-3818-4123-ae2b-eb66c4cf3ddb-kube-api-access-5s2pf\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:49 crc kubenswrapper[4718]: I1124 08:45:49.977875 4718 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/db65587c-3818-4123-ae2b-eb66c4cf3ddb-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:49 crc kubenswrapper[4718]: I1124 08:45:49.977886 4718 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/db65587c-3818-4123-ae2b-eb66c4cf3ddb-util\") on node \"crc\" DevicePath \"\"" Nov 24 08:45:50 crc kubenswrapper[4718]: I1124 08:45:50.468406 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mwbgr" Nov 24 08:45:50 crc kubenswrapper[4718]: I1124 08:45:50.519724 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" event={"ID":"db65587c-3818-4123-ae2b-eb66c4cf3ddb","Type":"ContainerDied","Data":"9c11721903b4b7c9c3baeaba64186837fa579dbc48e52918d4268297242abfce"} Nov 24 08:45:50 crc kubenswrapper[4718]: I1124 08:45:50.519783 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c11721903b4b7c9c3baeaba64186837fa579dbc48e52918d4268297242abfce" Nov 24 08:45:50 crc kubenswrapper[4718]: I1124 08:45:50.519824 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.089816 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v"] Nov 24 08:45:59 crc kubenswrapper[4718]: E1124 08:45:59.090612 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db65587c-3818-4123-ae2b-eb66c4cf3ddb" containerName="util" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.090627 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="db65587c-3818-4123-ae2b-eb66c4cf3ddb" containerName="util" Nov 24 08:45:59 crc kubenswrapper[4718]: E1124 08:45:59.090635 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db65587c-3818-4123-ae2b-eb66c4cf3ddb" containerName="pull" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.090642 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="db65587c-3818-4123-ae2b-eb66c4cf3ddb" containerName="pull" Nov 24 08:45:59 crc kubenswrapper[4718]: E1124 08:45:59.090652 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db65587c-3818-4123-ae2b-eb66c4cf3ddb" containerName="extract" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.090657 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="db65587c-3818-4123-ae2b-eb66c4cf3ddb" containerName="extract" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.090743 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="db65587c-3818-4123-ae2b-eb66c4cf3ddb" containerName="extract" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.091172 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.094574 4718 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.094707 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.094755 4718 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-l6f9h" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.094766 4718 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.094805 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.112364 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v"] Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.194016 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/95692fb9-b77a-4c2f-8263-c726a880f5d8-webhook-cert\") pod \"metallb-operator-controller-manager-667b6d8949-jgz4v\" (UID: \"95692fb9-b77a-4c2f-8263-c726a880f5d8\") " pod="metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.194141 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-pm4rq\" (UniqueName: \"kubernetes.io/projected/95692fb9-b77a-4c2f-8263-c726a880f5d8-kube-api-access-pm4rq\") pod \"metallb-operator-controller-manager-667b6d8949-jgz4v\" (UID: \"95692fb9-b77a-4c2f-8263-c726a880f5d8\") " pod="metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.194362 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/95692fb9-b77a-4c2f-8263-c726a880f5d8-apiservice-cert\") pod \"metallb-operator-controller-manager-667b6d8949-jgz4v\" (UID: \"95692fb9-b77a-4c2f-8263-c726a880f5d8\") " pod="metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.295254 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pm4rq\" (UniqueName: \"kubernetes.io/projected/95692fb9-b77a-4c2f-8263-c726a880f5d8-kube-api-access-pm4rq\") pod \"metallb-operator-controller-manager-667b6d8949-jgz4v\" (UID: \"95692fb9-b77a-4c2f-8263-c726a880f5d8\") " pod="metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.295348 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/95692fb9-b77a-4c2f-8263-c726a880f5d8-apiservice-cert\") pod \"metallb-operator-controller-manager-667b6d8949-jgz4v\" (UID: \"95692fb9-b77a-4c2f-8263-c726a880f5d8\") " pod="metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.296075 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/95692fb9-b77a-4c2f-8263-c726a880f5d8-webhook-cert\") pod \"metallb-operator-controller-manager-667b6d8949-jgz4v\" (UID: \"95692fb9-b77a-4c2f-8263-c726a880f5d8\") " pod="metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.303644 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/95692fb9-b77a-4c2f-8263-c726a880f5d8-apiservice-cert\") pod \"metallb-operator-controller-manager-667b6d8949-jgz4v\" (UID: \"95692fb9-b77a-4c2f-8263-c726a880f5d8\") " pod="metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.304075 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/95692fb9-b77a-4c2f-8263-c726a880f5d8-webhook-cert\") pod \"metallb-operator-controller-manager-667b6d8949-jgz4v\" (UID: \"95692fb9-b77a-4c2f-8263-c726a880f5d8\") " pod="metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.312281 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pm4rq\" (UniqueName: \"kubernetes.io/projected/95692fb9-b77a-4c2f-8263-c726a880f5d8-kube-api-access-pm4rq\") pod \"metallb-operator-controller-manager-667b6d8949-jgz4v\" (UID: \"95692fb9-b77a-4c2f-8263-c726a880f5d8\") " pod="metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.329175 4718 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8"] Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.329898 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.332745 4718 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.332745 4718 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.333300 4718 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-gnvmt" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.360399 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8"] Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.406145 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.503633 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5cl8\" (UniqueName: \"kubernetes.io/projected/2a38fc4c-62ec-4435-a15b-7b771d914c3e-kube-api-access-k5cl8\") pod \"metallb-operator-webhook-server-d745c75f7-xqzh8\" (UID: \"2a38fc4c-62ec-4435-a15b-7b771d914c3e\") " pod="metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.503677 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2a38fc4c-62ec-4435-a15b-7b771d914c3e-apiservice-cert\") pod \"metallb-operator-webhook-server-d745c75f7-xqzh8\" (UID: \"2a38fc4c-62ec-4435-a15b-7b771d914c3e\") " pod="metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.503705 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2a38fc4c-62ec-4435-a15b-7b771d914c3e-webhook-cert\") pod \"metallb-operator-webhook-server-d745c75f7-xqzh8\" (UID: \"2a38fc4c-62ec-4435-a15b-7b771d914c3e\") " pod="metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.605221 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5cl8\" (UniqueName: \"kubernetes.io/projected/2a38fc4c-62ec-4435-a15b-7b771d914c3e-kube-api-access-k5cl8\") pod \"metallb-operator-webhook-server-d745c75f7-xqzh8\" (UID: \"2a38fc4c-62ec-4435-a15b-7b771d914c3e\") " pod="metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.605535 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2a38fc4c-62ec-4435-a15b-7b771d914c3e-apiservice-cert\") pod \"metallb-operator-webhook-server-d745c75f7-xqzh8\" (UID: \"2a38fc4c-62ec-4435-a15b-7b771d914c3e\") " pod="metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.605565 4718 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2a38fc4c-62ec-4435-a15b-7b771d914c3e-webhook-cert\") pod \"metallb-operator-webhook-server-d745c75f7-xqzh8\" (UID: \"2a38fc4c-62ec-4435-a15b-7b771d914c3e\") " pod="metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.611417 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2a38fc4c-62ec-4435-a15b-7b771d914c3e-apiservice-cert\") pod \"metallb-operator-webhook-server-d745c75f7-xqzh8\" (UID: \"2a38fc4c-62ec-4435-a15b-7b771d914c3e\") " pod="metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.611804 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2a38fc4c-62ec-4435-a15b-7b771d914c3e-webhook-cert\") pod \"metallb-operator-webhook-server-d745c75f7-xqzh8\" (UID: \"2a38fc4c-62ec-4435-a15b-7b771d914c3e\") " pod="metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.627528 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5cl8\" (UniqueName: \"kubernetes.io/projected/2a38fc4c-62ec-4435-a15b-7b771d914c3e-kube-api-access-k5cl8\") pod \"metallb-operator-webhook-server-d745c75f7-xqzh8\" (UID: \"2a38fc4c-62ec-4435-a15b-7b771d914c3e\") " pod="metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.647296 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8" Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.655361 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v"] Nov 24 08:45:59 crc kubenswrapper[4718]: W1124 08:45:59.670490 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod95692fb9_b77a_4c2f_8263_c726a880f5d8.slice/crio-c4cd7173cb965dbd58f4634e7dcde52f796329233eda62177a0a957efc8b774a WatchSource:0}: Error finding container c4cd7173cb965dbd58f4634e7dcde52f796329233eda62177a0a957efc8b774a: Status 404 returned error can't find the container with id c4cd7173cb965dbd58f4634e7dcde52f796329233eda62177a0a957efc8b774a Nov 24 08:45:59 crc kubenswrapper[4718]: I1124 08:45:59.910203 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8"] Nov 24 08:45:59 crc kubenswrapper[4718]: W1124 08:45:59.917925 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a38fc4c_62ec_4435_a15b_7b771d914c3e.slice/crio-2fff1897200275fe81523df39068a78f9d95abfbd855a88ec7f124da2571faf6 WatchSource:0}: Error finding container 2fff1897200275fe81523df39068a78f9d95abfbd855a88ec7f124da2571faf6: Status 404 returned error can't find the container with id 2fff1897200275fe81523df39068a78f9d95abfbd855a88ec7f124da2571faf6 Nov 24 08:46:00 crc kubenswrapper[4718]: I1124 08:46:00.568284 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v" 
event={"ID":"95692fb9-b77a-4c2f-8263-c726a880f5d8","Type":"ContainerStarted","Data":"c4cd7173cb965dbd58f4634e7dcde52f796329233eda62177a0a957efc8b774a"} Nov 24 08:46:00 crc kubenswrapper[4718]: I1124 08:46:00.569157 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8" event={"ID":"2a38fc4c-62ec-4435-a15b-7b771d914c3e","Type":"ContainerStarted","Data":"2fff1897200275fe81523df39068a78f9d95abfbd855a88ec7f124da2571faf6"} Nov 24 08:46:04 crc kubenswrapper[4718]: I1124 08:46:04.591198 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8" event={"ID":"2a38fc4c-62ec-4435-a15b-7b771d914c3e","Type":"ContainerStarted","Data":"c1433575ad00a38369e97426a5362cb0b328c89cbd2da3c4f80955ebe203a10a"} Nov 24 08:46:04 crc kubenswrapper[4718]: I1124 08:46:04.591712 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8" Nov 24 08:46:04 crc kubenswrapper[4718]: I1124 08:46:04.592731 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v" event={"ID":"95692fb9-b77a-4c2f-8263-c726a880f5d8","Type":"ContainerStarted","Data":"6eebeacad7e9c44b1ffb16acca243692a91cfb5973231a18466cecd4b3272c42"} Nov 24 08:46:04 crc kubenswrapper[4718]: I1124 08:46:04.592864 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v" Nov 24 08:46:04 crc kubenswrapper[4718]: I1124 08:46:04.640964 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8" podStartSLOduration=1.633051609 podStartE2EDuration="5.640946073s" podCreationTimestamp="2025-11-24 08:45:59 +0000 UTC" firstStartedPulling="2025-11-24 08:45:59.923630477 +0000 UTC m=+632.039921381" lastFinishedPulling="2025-11-24 08:46:03.931524941 +0000 UTC m=+636.047815845" observedRunningTime="2025-11-24 08:46:04.613883495 +0000 UTC m=+636.730174419" watchObservedRunningTime="2025-11-24 08:46:04.640946073 +0000 UTC m=+636.757236977" Nov 24 08:46:04 crc kubenswrapper[4718]: I1124 08:46:04.641878 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v" podStartSLOduration=1.3936439919999999 podStartE2EDuration="5.641870886s" podCreationTimestamp="2025-11-24 08:45:59 +0000 UTC" firstStartedPulling="2025-11-24 08:45:59.674160531 +0000 UTC m=+631.790451435" lastFinishedPulling="2025-11-24 08:46:03.922387425 +0000 UTC m=+636.038678329" observedRunningTime="2025-11-24 08:46:04.638468682 +0000 UTC m=+636.754759596" watchObservedRunningTime="2025-11-24 08:46:04.641870886 +0000 UTC m=+636.758161790" Nov 24 08:46:19 crc kubenswrapper[4718]: I1124 08:46:19.655845 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-d745c75f7-xqzh8" Nov 24 08:46:39 crc kubenswrapper[4718]: I1124 08:46:39.409851 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-667b6d8949-jgz4v" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.166145 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-hx65l"] Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.168821 4718 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.170713 4718 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.170816 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-4j4sx"] Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.170928 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.171222 4718 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-nvvrd" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.175281 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-6998585d5-4j4sx" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.176103 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-4j4sx"] Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.177761 4718 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.247951 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-kvdb9"] Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.248861 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-kvdb9" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.251551 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.251732 4718 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-4jrb8" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.251915 4718 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.252051 4718 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.255934 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-metrics-certs\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.255999 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4fe355ed-c72c-47f5-9d75-9c8a93614d5d-cert\") pod \"frr-k8s-webhook-server-6998585d5-4j4sx\" (UID: \"4fe355ed-c72c-47f5-9d75-9c8a93614d5d\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-4j4sx" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.256017 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7mm6\" (UniqueName: \"kubernetes.io/projected/74c28e57-44c0-4992-ad63-4a291c2fa10f-kube-api-access-b7mm6\") pod \"speaker-kvdb9\" (UID: \"74c28e57-44c0-4992-ad63-4a291c2fa10f\") " pod="metallb-system/speaker-kvdb9" Nov 24 08:46:40 crc kubenswrapper[4718]: 
I1124 08:46:40.256039 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-frr-sockets\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.256057 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/74c28e57-44c0-4992-ad63-4a291c2fa10f-metallb-excludel2\") pod \"speaker-kvdb9\" (UID: \"74c28e57-44c0-4992-ad63-4a291c2fa10f\") " pod="metallb-system/speaker-kvdb9" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.256074 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-metrics\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.256089 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74c28e57-44c0-4992-ad63-4a291c2fa10f-metrics-certs\") pod \"speaker-kvdb9\" (UID: \"74c28e57-44c0-4992-ad63-4a291c2fa10f\") " pod="metallb-system/speaker-kvdb9" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.256104 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q57t\" (UniqueName: \"kubernetes.io/projected/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-kube-api-access-5q57t\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.256123 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ctx2\" (UniqueName: \"kubernetes.io/projected/4fe355ed-c72c-47f5-9d75-9c8a93614d5d-kube-api-access-6ctx2\") pod \"frr-k8s-webhook-server-6998585d5-4j4sx\" (UID: \"4fe355ed-c72c-47f5-9d75-9c8a93614d5d\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-4j4sx" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.256142 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-frr-startup\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.256162 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-reloader\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.256176 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-frr-conf\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.256198 4718 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/74c28e57-44c0-4992-ad63-4a291c2fa10f-memberlist\") pod \"speaker-kvdb9\" (UID: \"74c28e57-44c0-4992-ad63-4a291c2fa10f\") " pod="metallb-system/speaker-kvdb9" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.267633 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6c7b4b5f48-c4qp8"] Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.268421 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6c7b4b5f48-c4qp8" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.269818 4718 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.291841 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6c7b4b5f48-c4qp8"] Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357044 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ctx2\" (UniqueName: \"kubernetes.io/projected/4fe355ed-c72c-47f5-9d75-9c8a93614d5d-kube-api-access-6ctx2\") pod \"frr-k8s-webhook-server-6998585d5-4j4sx\" (UID: \"4fe355ed-c72c-47f5-9d75-9c8a93614d5d\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-4j4sx" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357095 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a7222fa5-30a4-4387-9417-6e38f5f2f651-metrics-certs\") pod \"controller-6c7b4b5f48-c4qp8\" (UID: \"a7222fa5-30a4-4387-9417-6e38f5f2f651\") " pod="metallb-system/controller-6c7b4b5f48-c4qp8" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357134 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-frr-startup\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357161 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-reloader\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357187 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-frr-conf\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357329 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/74c28e57-44c0-4992-ad63-4a291c2fa10f-memberlist\") pod \"speaker-kvdb9\" (UID: \"74c28e57-44c0-4992-ad63-4a291c2fa10f\") " pod="metallb-system/speaker-kvdb9" Nov 24 08:46:40 crc kubenswrapper[4718]: E1124 08:46:40.357432 4718 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357487 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-metrics-certs\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: E1124 08:46:40.357555 4718 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357560 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4fe355ed-c72c-47f5-9d75-9c8a93614d5d-cert\") pod \"frr-k8s-webhook-server-6998585d5-4j4sx\" (UID: \"4fe355ed-c72c-47f5-9d75-9c8a93614d5d\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-4j4sx" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357603 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-frr-conf\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: E1124 08:46:40.357622 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/74c28e57-44c0-4992-ad63-4a291c2fa10f-memberlist podName:74c28e57-44c0-4992-ad63-4a291c2fa10f nodeName:}" failed. No retries permitted until 2025-11-24 08:46:40.857573351 +0000 UTC m=+672.973864265 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/74c28e57-44c0-4992-ad63-4a291c2fa10f-memberlist") pod "speaker-kvdb9" (UID: "74c28e57-44c0-4992-ad63-4a291c2fa10f") : secret "metallb-memberlist" not found Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357633 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-reloader\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: E1124 08:46:40.357654 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-metrics-certs podName:e349dcb4-6ed5-4eac-bf4e-cc569cced0bf nodeName:}" failed. No retries permitted until 2025-11-24 08:46:40.857644243 +0000 UTC m=+672.973935147 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-metrics-certs") pod "frr-k8s-hx65l" (UID: "e349dcb4-6ed5-4eac-bf4e-cc569cced0bf") : secret "frr-k8s-certs-secret" not found Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357678 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7mm6\" (UniqueName: \"kubernetes.io/projected/74c28e57-44c0-4992-ad63-4a291c2fa10f-kube-api-access-b7mm6\") pod \"speaker-kvdb9\" (UID: \"74c28e57-44c0-4992-ad63-4a291c2fa10f\") " pod="metallb-system/speaker-kvdb9" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357712 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-frr-sockets\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357733 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a7222fa5-30a4-4387-9417-6e38f5f2f651-cert\") pod \"controller-6c7b4b5f48-c4qp8\" (UID: \"a7222fa5-30a4-4387-9417-6e38f5f2f651\") " pod="metallb-system/controller-6c7b4b5f48-c4qp8" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357756 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-449t5\" (UniqueName: \"kubernetes.io/projected/a7222fa5-30a4-4387-9417-6e38f5f2f651-kube-api-access-449t5\") pod \"controller-6c7b4b5f48-c4qp8\" (UID: \"a7222fa5-30a4-4387-9417-6e38f5f2f651\") " pod="metallb-system/controller-6c7b4b5f48-c4qp8" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357783 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/74c28e57-44c0-4992-ad63-4a291c2fa10f-metallb-excludel2\") pod \"speaker-kvdb9\" (UID: \"74c28e57-44c0-4992-ad63-4a291c2fa10f\") " pod="metallb-system/speaker-kvdb9" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357808 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-metrics\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357830 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74c28e57-44c0-4992-ad63-4a291c2fa10f-metrics-certs\") pod \"speaker-kvdb9\" (UID: \"74c28e57-44c0-4992-ad63-4a291c2fa10f\") " pod="metallb-system/speaker-kvdb9" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.357854 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q57t\" (UniqueName: \"kubernetes.io/projected/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-kube-api-access-5q57t\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.358056 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-frr-sockets\") pod \"frr-k8s-hx65l\" (UID: 
\"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.358130 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-metrics\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: E1124 08:46:40.358132 4718 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Nov 24 08:46:40 crc kubenswrapper[4718]: E1124 08:46:40.358185 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/74c28e57-44c0-4992-ad63-4a291c2fa10f-metrics-certs podName:74c28e57-44c0-4992-ad63-4a291c2fa10f nodeName:}" failed. No retries permitted until 2025-11-24 08:46:40.858169726 +0000 UTC m=+672.974460630 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/74c28e57-44c0-4992-ad63-4a291c2fa10f-metrics-certs") pod "speaker-kvdb9" (UID: "74c28e57-44c0-4992-ad63-4a291c2fa10f") : secret "speaker-certs-secret" not found Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.358253 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-frr-startup\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.358476 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/74c28e57-44c0-4992-ad63-4a291c2fa10f-metallb-excludel2\") pod \"speaker-kvdb9\" (UID: \"74c28e57-44c0-4992-ad63-4a291c2fa10f\") " pod="metallb-system/speaker-kvdb9" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.375919 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q57t\" (UniqueName: \"kubernetes.io/projected/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-kube-api-access-5q57t\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.376568 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4fe355ed-c72c-47f5-9d75-9c8a93614d5d-cert\") pod \"frr-k8s-webhook-server-6998585d5-4j4sx\" (UID: \"4fe355ed-c72c-47f5-9d75-9c8a93614d5d\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-4j4sx" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.376783 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ctx2\" (UniqueName: \"kubernetes.io/projected/4fe355ed-c72c-47f5-9d75-9c8a93614d5d-kube-api-access-6ctx2\") pod \"frr-k8s-webhook-server-6998585d5-4j4sx\" (UID: \"4fe355ed-c72c-47f5-9d75-9c8a93614d5d\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-4j4sx" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.377992 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7mm6\" (UniqueName: \"kubernetes.io/projected/74c28e57-44c0-4992-ad63-4a291c2fa10f-kube-api-access-b7mm6\") pod \"speaker-kvdb9\" (UID: \"74c28e57-44c0-4992-ad63-4a291c2fa10f\") " pod="metallb-system/speaker-kvdb9" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.459109 
4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a7222fa5-30a4-4387-9417-6e38f5f2f651-cert\") pod \"controller-6c7b4b5f48-c4qp8\" (UID: \"a7222fa5-30a4-4387-9417-6e38f5f2f651\") " pod="metallb-system/controller-6c7b4b5f48-c4qp8" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.459158 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-449t5\" (UniqueName: \"kubernetes.io/projected/a7222fa5-30a4-4387-9417-6e38f5f2f651-kube-api-access-449t5\") pod \"controller-6c7b4b5f48-c4qp8\" (UID: \"a7222fa5-30a4-4387-9417-6e38f5f2f651\") " pod="metallb-system/controller-6c7b4b5f48-c4qp8" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.459207 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a7222fa5-30a4-4387-9417-6e38f5f2f651-metrics-certs\") pod \"controller-6c7b4b5f48-c4qp8\" (UID: \"a7222fa5-30a4-4387-9417-6e38f5f2f651\") " pod="metallb-system/controller-6c7b4b5f48-c4qp8" Nov 24 08:46:40 crc kubenswrapper[4718]: E1124 08:46:40.459335 4718 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Nov 24 08:46:40 crc kubenswrapper[4718]: E1124 08:46:40.459397 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a7222fa5-30a4-4387-9417-6e38f5f2f651-metrics-certs podName:a7222fa5-30a4-4387-9417-6e38f5f2f651 nodeName:}" failed. No retries permitted until 2025-11-24 08:46:40.959379991 +0000 UTC m=+673.075670895 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a7222fa5-30a4-4387-9417-6e38f5f2f651-metrics-certs") pod "controller-6c7b4b5f48-c4qp8" (UID: "a7222fa5-30a4-4387-9417-6e38f5f2f651") : secret "controller-certs-secret" not found Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.461338 4718 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.476636 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a7222fa5-30a4-4387-9417-6e38f5f2f651-cert\") pod \"controller-6c7b4b5f48-c4qp8\" (UID: \"a7222fa5-30a4-4387-9417-6e38f5f2f651\") " pod="metallb-system/controller-6c7b4b5f48-c4qp8" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.480386 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-449t5\" (UniqueName: \"kubernetes.io/projected/a7222fa5-30a4-4387-9417-6e38f5f2f651-kube-api-access-449t5\") pod \"controller-6c7b4b5f48-c4qp8\" (UID: \"a7222fa5-30a4-4387-9417-6e38f5f2f651\") " pod="metallb-system/controller-6c7b4b5f48-c4qp8" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.504814 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-6998585d5-4j4sx" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.679561 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-4j4sx"] Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.780581 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-6998585d5-4j4sx" event={"ID":"4fe355ed-c72c-47f5-9d75-9c8a93614d5d","Type":"ContainerStarted","Data":"20835061dbe898465c15cc833762ce1d8b8b6acaca43f5d06cf0dd623a60dc26"} Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.863243 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74c28e57-44c0-4992-ad63-4a291c2fa10f-metrics-certs\") pod \"speaker-kvdb9\" (UID: \"74c28e57-44c0-4992-ad63-4a291c2fa10f\") " pod="metallb-system/speaker-kvdb9" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.863321 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/74c28e57-44c0-4992-ad63-4a291c2fa10f-memberlist\") pod \"speaker-kvdb9\" (UID: \"74c28e57-44c0-4992-ad63-4a291c2fa10f\") " pod="metallb-system/speaker-kvdb9" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.863350 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-metrics-certs\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: E1124 08:46:40.863491 4718 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 24 08:46:40 crc kubenswrapper[4718]: E1124 08:46:40.863564 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/74c28e57-44c0-4992-ad63-4a291c2fa10f-memberlist podName:74c28e57-44c0-4992-ad63-4a291c2fa10f nodeName:}" failed. No retries permitted until 2025-11-24 08:46:41.863544557 +0000 UTC m=+673.979835461 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/74c28e57-44c0-4992-ad63-4a291c2fa10f-memberlist") pod "speaker-kvdb9" (UID: "74c28e57-44c0-4992-ad63-4a291c2fa10f") : secret "metallb-memberlist" not found Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.868628 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74c28e57-44c0-4992-ad63-4a291c2fa10f-metrics-certs\") pod \"speaker-kvdb9\" (UID: \"74c28e57-44c0-4992-ad63-4a291c2fa10f\") " pod="metallb-system/speaker-kvdb9" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.868810 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e349dcb4-6ed5-4eac-bf4e-cc569cced0bf-metrics-certs\") pod \"frr-k8s-hx65l\" (UID: \"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf\") " pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.964619 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a7222fa5-30a4-4387-9417-6e38f5f2f651-metrics-certs\") pod \"controller-6c7b4b5f48-c4qp8\" (UID: \"a7222fa5-30a4-4387-9417-6e38f5f2f651\") " pod="metallb-system/controller-6c7b4b5f48-c4qp8" Nov 24 08:46:40 crc kubenswrapper[4718]: I1124 08:46:40.975865 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a7222fa5-30a4-4387-9417-6e38f5f2f651-metrics-certs\") pod \"controller-6c7b4b5f48-c4qp8\" (UID: \"a7222fa5-30a4-4387-9417-6e38f5f2f651\") " pod="metallb-system/controller-6c7b4b5f48-c4qp8" Nov 24 08:46:41 crc kubenswrapper[4718]: I1124 08:46:41.094141 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:41 crc kubenswrapper[4718]: I1124 08:46:41.183108 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6c7b4b5f48-c4qp8" Nov 24 08:46:41 crc kubenswrapper[4718]: I1124 08:46:41.365537 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6c7b4b5f48-c4qp8"] Nov 24 08:46:41 crc kubenswrapper[4718]: W1124 08:46:41.372346 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda7222fa5_30a4_4387_9417_6e38f5f2f651.slice/crio-06ab98ca8abfcf5ebeee42fca68e56684483d3918a86acbd4469e05d52129a51 WatchSource:0}: Error finding container 06ab98ca8abfcf5ebeee42fca68e56684483d3918a86acbd4469e05d52129a51: Status 404 returned error can't find the container with id 06ab98ca8abfcf5ebeee42fca68e56684483d3918a86acbd4469e05d52129a51 Nov 24 08:46:41 crc kubenswrapper[4718]: I1124 08:46:41.786487 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hx65l" event={"ID":"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf","Type":"ContainerStarted","Data":"87a8cfb7677eddbd861d00a6d3c50b0864bd6c7eba57adbb5ae6b00f31d83dcb"} Nov 24 08:46:41 crc kubenswrapper[4718]: I1124 08:46:41.787918 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-c4qp8" event={"ID":"a7222fa5-30a4-4387-9417-6e38f5f2f651","Type":"ContainerStarted","Data":"c5808d432de572f9754a134db759d051776fefc24a378dd2984695754fba8234"} Nov 24 08:46:41 crc kubenswrapper[4718]: I1124 08:46:41.787961 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-c4qp8" event={"ID":"a7222fa5-30a4-4387-9417-6e38f5f2f651","Type":"ContainerStarted","Data":"06ab98ca8abfcf5ebeee42fca68e56684483d3918a86acbd4469e05d52129a51"} Nov 24 08:46:41 crc kubenswrapper[4718]: I1124 08:46:41.874989 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/74c28e57-44c0-4992-ad63-4a291c2fa10f-memberlist\") pod \"speaker-kvdb9\" (UID: \"74c28e57-44c0-4992-ad63-4a291c2fa10f\") " pod="metallb-system/speaker-kvdb9" Nov 24 08:46:41 crc kubenswrapper[4718]: I1124 08:46:41.880113 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/74c28e57-44c0-4992-ad63-4a291c2fa10f-memberlist\") pod \"speaker-kvdb9\" (UID: \"74c28e57-44c0-4992-ad63-4a291c2fa10f\") " pod="metallb-system/speaker-kvdb9" Nov 24 08:46:42 crc kubenswrapper[4718]: I1124 08:46:42.064109 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-kvdb9" Nov 24 08:46:42 crc kubenswrapper[4718]: W1124 08:46:42.086118 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod74c28e57_44c0_4992_ad63_4a291c2fa10f.slice/crio-5abb6f1e044cd335c0fb53b2d4fefc537be4be365341a492380d513c99f94f6e WatchSource:0}: Error finding container 5abb6f1e044cd335c0fb53b2d4fefc537be4be365341a492380d513c99f94f6e: Status 404 returned error can't find the container with id 5abb6f1e044cd335c0fb53b2d4fefc537be4be365341a492380d513c99f94f6e Nov 24 08:46:42 crc kubenswrapper[4718]: I1124 08:46:42.798517 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-kvdb9" event={"ID":"74c28e57-44c0-4992-ad63-4a291c2fa10f","Type":"ContainerStarted","Data":"44450972b48af0e673720012f0e95801175b3c8022d0604ae916c7d43c6a5b97"} Nov 24 08:46:42 crc kubenswrapper[4718]: I1124 08:46:42.798560 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-kvdb9" event={"ID":"74c28e57-44c0-4992-ad63-4a291c2fa10f","Type":"ContainerStarted","Data":"5abb6f1e044cd335c0fb53b2d4fefc537be4be365341a492380d513c99f94f6e"} Nov 24 08:46:47 crc kubenswrapper[4718]: I1124 08:46:47.835947 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-kvdb9" event={"ID":"74c28e57-44c0-4992-ad63-4a291c2fa10f","Type":"ContainerStarted","Data":"686f015a9cd1b25f1ebfde5e0d4bad6c809243c0644a9942f61f13a73564e24a"} Nov 24 08:46:47 crc kubenswrapper[4718]: I1124 08:46:47.836513 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-kvdb9" Nov 24 08:46:47 crc kubenswrapper[4718]: I1124 08:46:47.839284 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-c4qp8" event={"ID":"a7222fa5-30a4-4387-9417-6e38f5f2f651","Type":"ContainerStarted","Data":"d6d8a1b80f996e97e9a3355083a5c266b7221c77715c58184a4c4970182bcdb4"} Nov 24 08:46:47 crc kubenswrapper[4718]: I1124 08:46:47.840046 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6c7b4b5f48-c4qp8" Nov 24 08:46:47 crc kubenswrapper[4718]: I1124 08:46:47.842354 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-6998585d5-4j4sx" event={"ID":"4fe355ed-c72c-47f5-9d75-9c8a93614d5d","Type":"ContainerStarted","Data":"b68b5780abd9070aaf79098a77555e8bd333b8ea16c0b6a76d04b6dbd4b6389b"} Nov 24 08:46:47 crc kubenswrapper[4718]: I1124 08:46:47.842874 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-6998585d5-4j4sx" Nov 24 08:46:47 crc kubenswrapper[4718]: I1124 08:46:47.845681 4718 generic.go:334] "Generic (PLEG): container finished" podID="e349dcb4-6ed5-4eac-bf4e-cc569cced0bf" containerID="abd77e69c032442725183fa4dbe311e05028a372312dea404583b781378af022" exitCode=0 Nov 24 08:46:47 crc kubenswrapper[4718]: I1124 08:46:47.845733 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hx65l" event={"ID":"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf","Type":"ContainerDied","Data":"abd77e69c032442725183fa4dbe311e05028a372312dea404583b781378af022"} Nov 24 08:46:47 crc kubenswrapper[4718]: I1124 08:46:47.859559 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-kvdb9" podStartSLOduration=2.633469245 podStartE2EDuration="7.859544347s" podCreationTimestamp="2025-11-24 08:46:40 +0000 UTC" 
firstStartedPulling="2025-11-24 08:46:42.305426784 +0000 UTC m=+674.421717688" lastFinishedPulling="2025-11-24 08:46:47.531501886 +0000 UTC m=+679.647792790" observedRunningTime="2025-11-24 08:46:47.856311467 +0000 UTC m=+679.972602381" watchObservedRunningTime="2025-11-24 08:46:47.859544347 +0000 UTC m=+679.975835251" Nov 24 08:46:47 crc kubenswrapper[4718]: I1124 08:46:47.899379 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-6998585d5-4j4sx" podStartSLOduration=1.020367529 podStartE2EDuration="7.899360083s" podCreationTimestamp="2025-11-24 08:46:40 +0000 UTC" firstStartedPulling="2025-11-24 08:46:40.688264077 +0000 UTC m=+672.804554981" lastFinishedPulling="2025-11-24 08:46:47.567256631 +0000 UTC m=+679.683547535" observedRunningTime="2025-11-24 08:46:47.897481756 +0000 UTC m=+680.013772670" watchObservedRunningTime="2025-11-24 08:46:47.899360083 +0000 UTC m=+680.015650987" Nov 24 08:46:47 crc kubenswrapper[4718]: I1124 08:46:47.917995 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6c7b4b5f48-c4qp8" podStartSLOduration=1.912661449 podStartE2EDuration="7.917962163s" podCreationTimestamp="2025-11-24 08:46:40 +0000 UTC" firstStartedPulling="2025-11-24 08:46:41.528799567 +0000 UTC m=+673.645090471" lastFinishedPulling="2025-11-24 08:46:47.534100281 +0000 UTC m=+679.650391185" observedRunningTime="2025-11-24 08:46:47.914257192 +0000 UTC m=+680.030548096" watchObservedRunningTime="2025-11-24 08:46:47.917962163 +0000 UTC m=+680.034253067" Nov 24 08:46:48 crc kubenswrapper[4718]: I1124 08:46:48.853745 4718 generic.go:334] "Generic (PLEG): container finished" podID="e349dcb4-6ed5-4eac-bf4e-cc569cced0bf" containerID="d8155daef8dec652fb98ebf26cb2c6243b0f1b722cbff6c10f3f7a3dfc075352" exitCode=0 Nov 24 08:46:48 crc kubenswrapper[4718]: I1124 08:46:48.853896 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hx65l" event={"ID":"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf","Type":"ContainerDied","Data":"d8155daef8dec652fb98ebf26cb2c6243b0f1b722cbff6c10f3f7a3dfc075352"} Nov 24 08:46:49 crc kubenswrapper[4718]: I1124 08:46:49.862789 4718 generic.go:334] "Generic (PLEG): container finished" podID="e349dcb4-6ed5-4eac-bf4e-cc569cced0bf" containerID="623bbdd6d1c1455bfc034029284ac59499cf644c7b9afd419cae543494aa0656" exitCode=0 Nov 24 08:46:49 crc kubenswrapper[4718]: I1124 08:46:49.862835 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hx65l" event={"ID":"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf","Type":"ContainerDied","Data":"623bbdd6d1c1455bfc034029284ac59499cf644c7b9afd419cae543494aa0656"} Nov 24 08:46:50 crc kubenswrapper[4718]: I1124 08:46:50.871662 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hx65l" event={"ID":"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf","Type":"ContainerStarted","Data":"016d383fc69308486cb78de694d81ee4efbb7c2ee4681833bd355e9453c83a51"} Nov 24 08:46:50 crc kubenswrapper[4718]: I1124 08:46:50.872085 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hx65l" event={"ID":"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf","Type":"ContainerStarted","Data":"84c6929927bc1ea23b1c2c7428bd274fc6a4f5e9587657e899ef345d10ad6d5a"} Nov 24 08:46:50 crc kubenswrapper[4718]: I1124 08:46:50.872099 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hx65l" 
event={"ID":"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf","Type":"ContainerStarted","Data":"f61232113ce4bf1497df3faaed11ca271dd41ee7846d61688a811c1d8268a1ff"} Nov 24 08:46:50 crc kubenswrapper[4718]: I1124 08:46:50.872108 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hx65l" event={"ID":"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf","Type":"ContainerStarted","Data":"959b66396d968a3e850f8b6aaf4757fb390a58cc2636592f096c8ac5d754bfa6"} Nov 24 08:46:50 crc kubenswrapper[4718]: I1124 08:46:50.872116 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hx65l" event={"ID":"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf","Type":"ContainerStarted","Data":"1e5a671a56c1c61a79764612d2a20d50256f2198b1f33c030fe434f4d25f8f32"} Nov 24 08:46:51 crc kubenswrapper[4718]: I1124 08:46:51.187138 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6c7b4b5f48-c4qp8" Nov 24 08:46:51 crc kubenswrapper[4718]: I1124 08:46:51.884509 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hx65l" event={"ID":"e349dcb4-6ed5-4eac-bf4e-cc569cced0bf","Type":"ContainerStarted","Data":"ad93576258dd68f48246ac0d89dd40d60bb7d728f2b37db12bdc2a968be51ecd"} Nov 24 08:46:51 crc kubenswrapper[4718]: I1124 08:46:51.885303 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:51 crc kubenswrapper[4718]: I1124 08:46:51.906508 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-hx65l" podStartSLOduration=5.568781103 podStartE2EDuration="11.906485017s" podCreationTimestamp="2025-11-24 08:46:40 +0000 UTC" firstStartedPulling="2025-11-24 08:46:41.195429753 +0000 UTC m=+673.311720657" lastFinishedPulling="2025-11-24 08:46:47.533133667 +0000 UTC m=+679.649424571" observedRunningTime="2025-11-24 08:46:51.902351655 +0000 UTC m=+684.018642559" watchObservedRunningTime="2025-11-24 08:46:51.906485017 +0000 UTC m=+684.022775921" Nov 24 08:46:52 crc kubenswrapper[4718]: I1124 08:46:52.067871 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-kvdb9" Nov 24 08:46:56 crc kubenswrapper[4718]: I1124 08:46:56.094761 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:56 crc kubenswrapper[4718]: I1124 08:46:56.138877 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-hx65l" Nov 24 08:46:58 crc kubenswrapper[4718]: I1124 08:46:58.537326 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-bqthp"] Nov 24 08:46:58 crc kubenswrapper[4718]: I1124 08:46:58.538190 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-bqthp" Nov 24 08:46:58 crc kubenswrapper[4718]: I1124 08:46:58.541200 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-index-dockercfg-hx88z" Nov 24 08:46:58 crc kubenswrapper[4718]: I1124 08:46:58.542800 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Nov 24 08:46:58 crc kubenswrapper[4718]: I1124 08:46:58.544445 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Nov 24 08:46:58 crc kubenswrapper[4718]: I1124 08:46:58.547484 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-bqthp"] Nov 24 08:46:58 crc kubenswrapper[4718]: I1124 08:46:58.702636 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8fxn\" (UniqueName: \"kubernetes.io/projected/891fc8ff-882a-4ace-9c89-e339d051b521-kube-api-access-n8fxn\") pod \"mariadb-operator-index-bqthp\" (UID: \"891fc8ff-882a-4ace-9c89-e339d051b521\") " pod="openstack-operators/mariadb-operator-index-bqthp" Nov 24 08:46:58 crc kubenswrapper[4718]: I1124 08:46:58.803705 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8fxn\" (UniqueName: \"kubernetes.io/projected/891fc8ff-882a-4ace-9c89-e339d051b521-kube-api-access-n8fxn\") pod \"mariadb-operator-index-bqthp\" (UID: \"891fc8ff-882a-4ace-9c89-e339d051b521\") " pod="openstack-operators/mariadb-operator-index-bqthp" Nov 24 08:46:58 crc kubenswrapper[4718]: I1124 08:46:58.822028 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8fxn\" (UniqueName: \"kubernetes.io/projected/891fc8ff-882a-4ace-9c89-e339d051b521-kube-api-access-n8fxn\") pod \"mariadb-operator-index-bqthp\" (UID: \"891fc8ff-882a-4ace-9c89-e339d051b521\") " pod="openstack-operators/mariadb-operator-index-bqthp" Nov 24 08:46:58 crc kubenswrapper[4718]: I1124 08:46:58.855586 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-bqthp" Nov 24 08:46:59 crc kubenswrapper[4718]: I1124 08:46:59.086664 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-bqthp"] Nov 24 08:46:59 crc kubenswrapper[4718]: I1124 08:46:59.924071 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-bqthp" event={"ID":"891fc8ff-882a-4ace-9c89-e339d051b521","Type":"ContainerStarted","Data":"3e22200b70fdf25751d68d093aa87741df55cd6479504c5054b91c0402a13132"} Nov 24 08:47:00 crc kubenswrapper[4718]: I1124 08:47:00.509507 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-6998585d5-4j4sx" Nov 24 08:47:01 crc kubenswrapper[4718]: I1124 08:47:01.100941 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-hx65l" Nov 24 08:47:01 crc kubenswrapper[4718]: I1124 08:47:01.913018 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-bqthp"] Nov 24 08:47:01 crc kubenswrapper[4718]: I1124 08:47:01.935540 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-bqthp" event={"ID":"891fc8ff-882a-4ace-9c89-e339d051b521","Type":"ContainerStarted","Data":"16dd14658d9c5a1e288bd105d189b515e55c1f1286b2705032a8063242edad81"} Nov 24 08:47:01 crc kubenswrapper[4718]: I1124 08:47:01.948419 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-bqthp" podStartSLOduration=2.100710309 podStartE2EDuration="3.948397893s" podCreationTimestamp="2025-11-24 08:46:58 +0000 UTC" firstStartedPulling="2025-11-24 08:46:59.096670284 +0000 UTC m=+691.212961188" lastFinishedPulling="2025-11-24 08:47:00.944357868 +0000 UTC m=+693.060648772" observedRunningTime="2025-11-24 08:47:01.946489786 +0000 UTC m=+694.062780690" watchObservedRunningTime="2025-11-24 08:47:01.948397893 +0000 UTC m=+694.064688797" Nov 24 08:47:02 crc kubenswrapper[4718]: I1124 08:47:02.518456 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-69p5f"] Nov 24 08:47:02 crc kubenswrapper[4718]: I1124 08:47:02.519401 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-69p5f" Nov 24 08:47:02 crc kubenswrapper[4718]: I1124 08:47:02.532544 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-69p5f"] Nov 24 08:47:02 crc kubenswrapper[4718]: I1124 08:47:02.655651 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwqq6\" (UniqueName: \"kubernetes.io/projected/98219050-8eff-435a-9511-d33ce4e58619-kube-api-access-vwqq6\") pod \"mariadb-operator-index-69p5f\" (UID: \"98219050-8eff-435a-9511-d33ce4e58619\") " pod="openstack-operators/mariadb-operator-index-69p5f" Nov 24 08:47:02 crc kubenswrapper[4718]: I1124 08:47:02.757322 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwqq6\" (UniqueName: \"kubernetes.io/projected/98219050-8eff-435a-9511-d33ce4e58619-kube-api-access-vwqq6\") pod \"mariadb-operator-index-69p5f\" (UID: \"98219050-8eff-435a-9511-d33ce4e58619\") " pod="openstack-operators/mariadb-operator-index-69p5f" Nov 24 08:47:02 crc kubenswrapper[4718]: I1124 08:47:02.775293 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwqq6\" (UniqueName: \"kubernetes.io/projected/98219050-8eff-435a-9511-d33ce4e58619-kube-api-access-vwqq6\") pod \"mariadb-operator-index-69p5f\" (UID: \"98219050-8eff-435a-9511-d33ce4e58619\") " pod="openstack-operators/mariadb-operator-index-69p5f" Nov 24 08:47:02 crc kubenswrapper[4718]: I1124 08:47:02.846018 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-69p5f" Nov 24 08:47:02 crc kubenswrapper[4718]: I1124 08:47:02.941162 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-bqthp" podUID="891fc8ff-882a-4ace-9c89-e339d051b521" containerName="registry-server" containerID="cri-o://16dd14658d9c5a1e288bd105d189b515e55c1f1286b2705032a8063242edad81" gracePeriod=2 Nov 24 08:47:03 crc kubenswrapper[4718]: I1124 08:47:03.214438 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-69p5f"] Nov 24 08:47:03 crc kubenswrapper[4718]: W1124 08:47:03.219510 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98219050_8eff_435a_9511_d33ce4e58619.slice/crio-fe03ebe306da7ce939f37c6c2386d381073190a7c2768e358f1c0a7e5a475d85 WatchSource:0}: Error finding container fe03ebe306da7ce939f37c6c2386d381073190a7c2768e358f1c0a7e5a475d85: Status 404 returned error can't find the container with id fe03ebe306da7ce939f37c6c2386d381073190a7c2768e358f1c0a7e5a475d85 Nov 24 08:47:03 crc kubenswrapper[4718]: I1124 08:47:03.246219 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-bqthp" Nov 24 08:47:03 crc kubenswrapper[4718]: I1124 08:47:03.362954 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8fxn\" (UniqueName: \"kubernetes.io/projected/891fc8ff-882a-4ace-9c89-e339d051b521-kube-api-access-n8fxn\") pod \"891fc8ff-882a-4ace-9c89-e339d051b521\" (UID: \"891fc8ff-882a-4ace-9c89-e339d051b521\") " Nov 24 08:47:03 crc kubenswrapper[4718]: I1124 08:47:03.367801 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/891fc8ff-882a-4ace-9c89-e339d051b521-kube-api-access-n8fxn" (OuterVolumeSpecName: "kube-api-access-n8fxn") pod "891fc8ff-882a-4ace-9c89-e339d051b521" (UID: "891fc8ff-882a-4ace-9c89-e339d051b521"). InnerVolumeSpecName "kube-api-access-n8fxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:47:03 crc kubenswrapper[4718]: I1124 08:47:03.464472 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8fxn\" (UniqueName: \"kubernetes.io/projected/891fc8ff-882a-4ace-9c89-e339d051b521-kube-api-access-n8fxn\") on node \"crc\" DevicePath \"\"" Nov 24 08:47:03 crc kubenswrapper[4718]: I1124 08:47:03.948247 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-69p5f" event={"ID":"98219050-8eff-435a-9511-d33ce4e58619","Type":"ContainerStarted","Data":"aeb1f97596670fede278a30c616202e67cc0d80faa4815cf129259234cff74e3"} Nov 24 08:47:03 crc kubenswrapper[4718]: I1124 08:47:03.948290 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-69p5f" event={"ID":"98219050-8eff-435a-9511-d33ce4e58619","Type":"ContainerStarted","Data":"fe03ebe306da7ce939f37c6c2386d381073190a7c2768e358f1c0a7e5a475d85"} Nov 24 08:47:03 crc kubenswrapper[4718]: I1124 08:47:03.949280 4718 generic.go:334] "Generic (PLEG): container finished" podID="891fc8ff-882a-4ace-9c89-e339d051b521" containerID="16dd14658d9c5a1e288bd105d189b515e55c1f1286b2705032a8063242edad81" exitCode=0 Nov 24 08:47:03 crc kubenswrapper[4718]: I1124 08:47:03.949305 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-bqthp" event={"ID":"891fc8ff-882a-4ace-9c89-e339d051b521","Type":"ContainerDied","Data":"16dd14658d9c5a1e288bd105d189b515e55c1f1286b2705032a8063242edad81"} Nov 24 08:47:03 crc kubenswrapper[4718]: I1124 08:47:03.949321 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-bqthp" event={"ID":"891fc8ff-882a-4ace-9c89-e339d051b521","Type":"ContainerDied","Data":"3e22200b70fdf25751d68d093aa87741df55cd6479504c5054b91c0402a13132"} Nov 24 08:47:03 crc kubenswrapper[4718]: I1124 08:47:03.949336 4718 scope.go:117] "RemoveContainer" containerID="16dd14658d9c5a1e288bd105d189b515e55c1f1286b2705032a8063242edad81" Nov 24 08:47:03 crc kubenswrapper[4718]: I1124 08:47:03.949444 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-bqthp" Nov 24 08:47:03 crc kubenswrapper[4718]: I1124 08:47:03.967264 4718 scope.go:117] "RemoveContainer" containerID="16dd14658d9c5a1e288bd105d189b515e55c1f1286b2705032a8063242edad81" Nov 24 08:47:03 crc kubenswrapper[4718]: E1124 08:47:03.967729 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16dd14658d9c5a1e288bd105d189b515e55c1f1286b2705032a8063242edad81\": container with ID starting with 16dd14658d9c5a1e288bd105d189b515e55c1f1286b2705032a8063242edad81 not found: ID does not exist" containerID="16dd14658d9c5a1e288bd105d189b515e55c1f1286b2705032a8063242edad81" Nov 24 08:47:03 crc kubenswrapper[4718]: I1124 08:47:03.967769 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16dd14658d9c5a1e288bd105d189b515e55c1f1286b2705032a8063242edad81"} err="failed to get container status \"16dd14658d9c5a1e288bd105d189b515e55c1f1286b2705032a8063242edad81\": rpc error: code = NotFound desc = could not find container \"16dd14658d9c5a1e288bd105d189b515e55c1f1286b2705032a8063242edad81\": container with ID starting with 16dd14658d9c5a1e288bd105d189b515e55c1f1286b2705032a8063242edad81 not found: ID does not exist" Nov 24 08:47:03 crc kubenswrapper[4718]: I1124 08:47:03.974160 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-69p5f" podStartSLOduration=1.550205168 podStartE2EDuration="1.974122483s" podCreationTimestamp="2025-11-24 08:47:02 +0000 UTC" firstStartedPulling="2025-11-24 08:47:03.223231674 +0000 UTC m=+695.339522578" lastFinishedPulling="2025-11-24 08:47:03.647148989 +0000 UTC m=+695.763439893" observedRunningTime="2025-11-24 08:47:03.960741162 +0000 UTC m=+696.077032086" watchObservedRunningTime="2025-11-24 08:47:03.974122483 +0000 UTC m=+696.090413387" Nov 24 08:47:03 crc kubenswrapper[4718]: I1124 08:47:03.977646 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-bqthp"] Nov 24 08:47:03 crc kubenswrapper[4718]: I1124 08:47:03.980555 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-bqthp"] Nov 24 08:47:04 crc kubenswrapper[4718]: I1124 08:47:04.603692 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="891fc8ff-882a-4ace-9c89-e339d051b521" path="/var/lib/kubelet/pods/891fc8ff-882a-4ace-9c89-e339d051b521/volumes" Nov 24 08:47:12 crc kubenswrapper[4718]: I1124 08:47:12.846659 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/mariadb-operator-index-69p5f" Nov 24 08:47:12 crc kubenswrapper[4718]: I1124 08:47:12.848430 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-index-69p5f" Nov 24 08:47:12 crc kubenswrapper[4718]: I1124 08:47:12.870146 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/mariadb-operator-index-69p5f" Nov 24 08:47:13 crc kubenswrapper[4718]: I1124 08:47:13.011246 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-index-69p5f" Nov 24 08:47:19 crc kubenswrapper[4718]: I1124 08:47:19.026206 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx"] Nov 24 08:47:19 crc 
kubenswrapper[4718]: E1124 08:47:19.028339 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="891fc8ff-882a-4ace-9c89-e339d051b521" containerName="registry-server" Nov 24 08:47:19 crc kubenswrapper[4718]: I1124 08:47:19.029666 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="891fc8ff-882a-4ace-9c89-e339d051b521" containerName="registry-server" Nov 24 08:47:19 crc kubenswrapper[4718]: I1124 08:47:19.029902 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="891fc8ff-882a-4ace-9c89-e339d051b521" containerName="registry-server" Nov 24 08:47:19 crc kubenswrapper[4718]: I1124 08:47:19.030852 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" Nov 24 08:47:19 crc kubenswrapper[4718]: I1124 08:47:19.032909 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx"] Nov 24 08:47:19 crc kubenswrapper[4718]: I1124 08:47:19.033246 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-fq6vw" Nov 24 08:47:19 crc kubenswrapper[4718]: I1124 08:47:19.156237 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xj2fq\" (UniqueName: \"kubernetes.io/projected/db9da525-6b14-44ba-8895-8b862aa9f66a-kube-api-access-xj2fq\") pod \"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx\" (UID: \"db9da525-6b14-44ba-8895-8b862aa9f66a\") " pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" Nov 24 08:47:19 crc kubenswrapper[4718]: I1124 08:47:19.156293 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/db9da525-6b14-44ba-8895-8b862aa9f66a-bundle\") pod \"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx\" (UID: \"db9da525-6b14-44ba-8895-8b862aa9f66a\") " pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" Nov 24 08:47:19 crc kubenswrapper[4718]: I1124 08:47:19.156317 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/db9da525-6b14-44ba-8895-8b862aa9f66a-util\") pod \"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx\" (UID: \"db9da525-6b14-44ba-8895-8b862aa9f66a\") " pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" Nov 24 08:47:19 crc kubenswrapper[4718]: I1124 08:47:19.257914 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xj2fq\" (UniqueName: \"kubernetes.io/projected/db9da525-6b14-44ba-8895-8b862aa9f66a-kube-api-access-xj2fq\") pod \"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx\" (UID: \"db9da525-6b14-44ba-8895-8b862aa9f66a\") " pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" Nov 24 08:47:19 crc kubenswrapper[4718]: I1124 08:47:19.257987 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/db9da525-6b14-44ba-8895-8b862aa9f66a-bundle\") pod \"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx\" (UID: \"db9da525-6b14-44ba-8895-8b862aa9f66a\") " 
pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" Nov 24 08:47:19 crc kubenswrapper[4718]: I1124 08:47:19.258018 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/db9da525-6b14-44ba-8895-8b862aa9f66a-util\") pod \"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx\" (UID: \"db9da525-6b14-44ba-8895-8b862aa9f66a\") " pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" Nov 24 08:47:19 crc kubenswrapper[4718]: I1124 08:47:19.258611 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/db9da525-6b14-44ba-8895-8b862aa9f66a-util\") pod \"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx\" (UID: \"db9da525-6b14-44ba-8895-8b862aa9f66a\") " pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" Nov 24 08:47:19 crc kubenswrapper[4718]: I1124 08:47:19.259136 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/db9da525-6b14-44ba-8895-8b862aa9f66a-bundle\") pod \"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx\" (UID: \"db9da525-6b14-44ba-8895-8b862aa9f66a\") " pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" Nov 24 08:47:19 crc kubenswrapper[4718]: I1124 08:47:19.276702 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xj2fq\" (UniqueName: \"kubernetes.io/projected/db9da525-6b14-44ba-8895-8b862aa9f66a-kube-api-access-xj2fq\") pod \"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx\" (UID: \"db9da525-6b14-44ba-8895-8b862aa9f66a\") " pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" Nov 24 08:47:19 crc kubenswrapper[4718]: I1124 08:47:19.347155 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" Nov 24 08:47:19 crc kubenswrapper[4718]: I1124 08:47:19.598898 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx"] Nov 24 08:47:20 crc kubenswrapper[4718]: I1124 08:47:20.029465 4718 generic.go:334] "Generic (PLEG): container finished" podID="db9da525-6b14-44ba-8895-8b862aa9f66a" containerID="7bb4adf56d055904b1fcac3e864df06a38a4e7abee816ece154fa5915e5927e5" exitCode=0 Nov 24 08:47:20 crc kubenswrapper[4718]: I1124 08:47:20.029581 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" event={"ID":"db9da525-6b14-44ba-8895-8b862aa9f66a","Type":"ContainerDied","Data":"7bb4adf56d055904b1fcac3e864df06a38a4e7abee816ece154fa5915e5927e5"} Nov 24 08:47:20 crc kubenswrapper[4718]: I1124 08:47:20.030006 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" event={"ID":"db9da525-6b14-44ba-8895-8b862aa9f66a","Type":"ContainerStarted","Data":"3306c970f48a5feedc2f8ee3c0ea7ec533d2e7bda864bbf9e86d80f1c78009cb"} Nov 24 08:47:22 crc kubenswrapper[4718]: I1124 08:47:22.044378 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:47:22 crc kubenswrapper[4718]: I1124 08:47:22.044673 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:47:22 crc kubenswrapper[4718]: I1124 08:47:22.044599 4718 generic.go:334] "Generic (PLEG): container finished" podID="db9da525-6b14-44ba-8895-8b862aa9f66a" containerID="ea273b3c391451b556c212eaafe35aab482d83fa27d81c9f79b9c38f28a22f02" exitCode=0 Nov 24 08:47:22 crc kubenswrapper[4718]: I1124 08:47:22.044628 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" event={"ID":"db9da525-6b14-44ba-8895-8b862aa9f66a","Type":"ContainerDied","Data":"ea273b3c391451b556c212eaafe35aab482d83fa27d81c9f79b9c38f28a22f02"} Nov 24 08:47:23 crc kubenswrapper[4718]: I1124 08:47:23.053472 4718 generic.go:334] "Generic (PLEG): container finished" podID="db9da525-6b14-44ba-8895-8b862aa9f66a" containerID="22dd4dfad8043546d3902d74b8db9c475fbb9b8aedcfaea85ed81843a81be584" exitCode=0 Nov 24 08:47:23 crc kubenswrapper[4718]: I1124 08:47:23.053590 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" event={"ID":"db9da525-6b14-44ba-8895-8b862aa9f66a","Type":"ContainerDied","Data":"22dd4dfad8043546d3902d74b8db9c475fbb9b8aedcfaea85ed81843a81be584"} Nov 24 08:47:24 crc kubenswrapper[4718]: I1124 08:47:24.260318 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" Nov 24 08:47:24 crc kubenswrapper[4718]: I1124 08:47:24.326474 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xj2fq\" (UniqueName: \"kubernetes.io/projected/db9da525-6b14-44ba-8895-8b862aa9f66a-kube-api-access-xj2fq\") pod \"db9da525-6b14-44ba-8895-8b862aa9f66a\" (UID: \"db9da525-6b14-44ba-8895-8b862aa9f66a\") " Nov 24 08:47:24 crc kubenswrapper[4718]: I1124 08:47:24.326572 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/db9da525-6b14-44ba-8895-8b862aa9f66a-bundle\") pod \"db9da525-6b14-44ba-8895-8b862aa9f66a\" (UID: \"db9da525-6b14-44ba-8895-8b862aa9f66a\") " Nov 24 08:47:24 crc kubenswrapper[4718]: I1124 08:47:24.326592 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/db9da525-6b14-44ba-8895-8b862aa9f66a-util\") pod \"db9da525-6b14-44ba-8895-8b862aa9f66a\" (UID: \"db9da525-6b14-44ba-8895-8b862aa9f66a\") " Nov 24 08:47:24 crc kubenswrapper[4718]: I1124 08:47:24.327557 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db9da525-6b14-44ba-8895-8b862aa9f66a-bundle" (OuterVolumeSpecName: "bundle") pod "db9da525-6b14-44ba-8895-8b862aa9f66a" (UID: "db9da525-6b14-44ba-8895-8b862aa9f66a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:47:24 crc kubenswrapper[4718]: I1124 08:47:24.334238 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db9da525-6b14-44ba-8895-8b862aa9f66a-kube-api-access-xj2fq" (OuterVolumeSpecName: "kube-api-access-xj2fq") pod "db9da525-6b14-44ba-8895-8b862aa9f66a" (UID: "db9da525-6b14-44ba-8895-8b862aa9f66a"). InnerVolumeSpecName "kube-api-access-xj2fq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:47:24 crc kubenswrapper[4718]: I1124 08:47:24.342336 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db9da525-6b14-44ba-8895-8b862aa9f66a-util" (OuterVolumeSpecName: "util") pod "db9da525-6b14-44ba-8895-8b862aa9f66a" (UID: "db9da525-6b14-44ba-8895-8b862aa9f66a"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:47:24 crc kubenswrapper[4718]: I1124 08:47:24.427758 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xj2fq\" (UniqueName: \"kubernetes.io/projected/db9da525-6b14-44ba-8895-8b862aa9f66a-kube-api-access-xj2fq\") on node \"crc\" DevicePath \"\"" Nov 24 08:47:24 crc kubenswrapper[4718]: I1124 08:47:24.427791 4718 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/db9da525-6b14-44ba-8895-8b862aa9f66a-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:47:24 crc kubenswrapper[4718]: I1124 08:47:24.427803 4718 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/db9da525-6b14-44ba-8895-8b862aa9f66a-util\") on node \"crc\" DevicePath \"\"" Nov 24 08:47:25 crc kubenswrapper[4718]: I1124 08:47:25.064918 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" event={"ID":"db9da525-6b14-44ba-8895-8b862aa9f66a","Type":"ContainerDied","Data":"3306c970f48a5feedc2f8ee3c0ea7ec533d2e7bda864bbf9e86d80f1c78009cb"} Nov 24 08:47:25 crc kubenswrapper[4718]: I1124 08:47:25.064979 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3306c970f48a5feedc2f8ee3c0ea7ec533d2e7bda864bbf9e86d80f1c78009cb" Nov 24 08:47:25 crc kubenswrapper[4718]: I1124 08:47:25.064962 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.533392 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq"] Nov 24 08:47:32 crc kubenswrapper[4718]: E1124 08:47:32.534026 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db9da525-6b14-44ba-8895-8b862aa9f66a" containerName="pull" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.534039 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="db9da525-6b14-44ba-8895-8b862aa9f66a" containerName="pull" Nov 24 08:47:32 crc kubenswrapper[4718]: E1124 08:47:32.534048 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db9da525-6b14-44ba-8895-8b862aa9f66a" containerName="extract" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.534055 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="db9da525-6b14-44ba-8895-8b862aa9f66a" containerName="extract" Nov 24 08:47:32 crc kubenswrapper[4718]: E1124 08:47:32.534065 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db9da525-6b14-44ba-8895-8b862aa9f66a" containerName="util" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.534071 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="db9da525-6b14-44ba-8895-8b862aa9f66a" containerName="util" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.534166 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="db9da525-6b14-44ba-8895-8b862aa9f66a" containerName="extract" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.534698 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.537476 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-service-cert" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.537580 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-f5lt2" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.537730 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.546793 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq"] Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.626135 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2da1235c-b60e-4440-88bb-6600e6b61308-apiservice-cert\") pod \"mariadb-operator-controller-manager-65bfd68697-jw2bq\" (UID: \"2da1235c-b60e-4440-88bb-6600e6b61308\") " pod="openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.626216 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mmzj\" (UniqueName: \"kubernetes.io/projected/2da1235c-b60e-4440-88bb-6600e6b61308-kube-api-access-6mmzj\") pod \"mariadb-operator-controller-manager-65bfd68697-jw2bq\" (UID: \"2da1235c-b60e-4440-88bb-6600e6b61308\") " pod="openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.626401 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2da1235c-b60e-4440-88bb-6600e6b61308-webhook-cert\") pod \"mariadb-operator-controller-manager-65bfd68697-jw2bq\" (UID: \"2da1235c-b60e-4440-88bb-6600e6b61308\") " pod="openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.728006 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2da1235c-b60e-4440-88bb-6600e6b61308-webhook-cert\") pod \"mariadb-operator-controller-manager-65bfd68697-jw2bq\" (UID: \"2da1235c-b60e-4440-88bb-6600e6b61308\") " pod="openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.728097 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2da1235c-b60e-4440-88bb-6600e6b61308-apiservice-cert\") pod \"mariadb-operator-controller-manager-65bfd68697-jw2bq\" (UID: \"2da1235c-b60e-4440-88bb-6600e6b61308\") " pod="openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.728125 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mmzj\" (UniqueName: \"kubernetes.io/projected/2da1235c-b60e-4440-88bb-6600e6b61308-kube-api-access-6mmzj\") pod \"mariadb-operator-controller-manager-65bfd68697-jw2bq\" (UID: \"2da1235c-b60e-4440-88bb-6600e6b61308\") 
" pod="openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.736211 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2da1235c-b60e-4440-88bb-6600e6b61308-apiservice-cert\") pod \"mariadb-operator-controller-manager-65bfd68697-jw2bq\" (UID: \"2da1235c-b60e-4440-88bb-6600e6b61308\") " pod="openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.736270 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2da1235c-b60e-4440-88bb-6600e6b61308-webhook-cert\") pod \"mariadb-operator-controller-manager-65bfd68697-jw2bq\" (UID: \"2da1235c-b60e-4440-88bb-6600e6b61308\") " pod="openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.746756 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mmzj\" (UniqueName: \"kubernetes.io/projected/2da1235c-b60e-4440-88bb-6600e6b61308-kube-api-access-6mmzj\") pod \"mariadb-operator-controller-manager-65bfd68697-jw2bq\" (UID: \"2da1235c-b60e-4440-88bb-6600e6b61308\") " pod="openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq" Nov 24 08:47:32 crc kubenswrapper[4718]: I1124 08:47:32.853043 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq" Nov 24 08:47:33 crc kubenswrapper[4718]: I1124 08:47:33.111074 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq"] Nov 24 08:47:33 crc kubenswrapper[4718]: W1124 08:47:33.116323 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2da1235c_b60e_4440_88bb_6600e6b61308.slice/crio-5e128614e09e56f9081ffb908cf6ee39dd3a0cd20e9892c63b70028c1d746365 WatchSource:0}: Error finding container 5e128614e09e56f9081ffb908cf6ee39dd3a0cd20e9892c63b70028c1d746365: Status 404 returned error can't find the container with id 5e128614e09e56f9081ffb908cf6ee39dd3a0cd20e9892c63b70028c1d746365 Nov 24 08:47:34 crc kubenswrapper[4718]: I1124 08:47:34.123610 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq" event={"ID":"2da1235c-b60e-4440-88bb-6600e6b61308","Type":"ContainerStarted","Data":"5e128614e09e56f9081ffb908cf6ee39dd3a0cd20e9892c63b70028c1d746365"} Nov 24 08:47:37 crc kubenswrapper[4718]: I1124 08:47:37.141312 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq" event={"ID":"2da1235c-b60e-4440-88bb-6600e6b61308","Type":"ContainerStarted","Data":"096ed5dcefb9307ba5c203775a0648a7d2077fa44ca45cbc8b445fe7accde4ef"} Nov 24 08:47:40 crc kubenswrapper[4718]: I1124 08:47:40.157071 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq" event={"ID":"2da1235c-b60e-4440-88bb-6600e6b61308","Type":"ContainerStarted","Data":"3f4c58b2347a860346f7ffe7cb17cd9f8963736ed2a5268733ab75a1e75fdd5c"} Nov 24 08:47:40 crc kubenswrapper[4718]: I1124 08:47:40.157452 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq" Nov 24 08:47:40 crc kubenswrapper[4718]: I1124 08:47:40.176100 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq" podStartSLOduration=1.860832464 podStartE2EDuration="8.17607839s" podCreationTimestamp="2025-11-24 08:47:32 +0000 UTC" firstStartedPulling="2025-11-24 08:47:33.120033934 +0000 UTC m=+725.236324838" lastFinishedPulling="2025-11-24 08:47:39.43527986 +0000 UTC m=+731.551570764" observedRunningTime="2025-11-24 08:47:40.17164944 +0000 UTC m=+732.287940344" watchObservedRunningTime="2025-11-24 08:47:40.17607839 +0000 UTC m=+732.292369294" Nov 24 08:47:41 crc kubenswrapper[4718]: I1124 08:47:41.165601 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-65bfd68697-jw2bq" Nov 24 08:47:45 crc kubenswrapper[4718]: I1124 08:47:45.080989 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-6gpnb"] Nov 24 08:47:45 crc kubenswrapper[4718]: I1124 08:47:45.082236 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-6gpnb" Nov 24 08:47:45 crc kubenswrapper[4718]: I1124 08:47:45.084082 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-index-dockercfg-8wr8m" Nov 24 08:47:45 crc kubenswrapper[4718]: I1124 08:47:45.091864 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-6gpnb"] Nov 24 08:47:45 crc kubenswrapper[4718]: I1124 08:47:45.197426 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46jbj\" (UniqueName: \"kubernetes.io/projected/b2c159c8-289a-4525-a23a-a6d5b2af870d-kube-api-access-46jbj\") pod \"infra-operator-index-6gpnb\" (UID: \"b2c159c8-289a-4525-a23a-a6d5b2af870d\") " pod="openstack-operators/infra-operator-index-6gpnb" Nov 24 08:47:45 crc kubenswrapper[4718]: I1124 08:47:45.298523 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46jbj\" (UniqueName: \"kubernetes.io/projected/b2c159c8-289a-4525-a23a-a6d5b2af870d-kube-api-access-46jbj\") pod \"infra-operator-index-6gpnb\" (UID: \"b2c159c8-289a-4525-a23a-a6d5b2af870d\") " pod="openstack-operators/infra-operator-index-6gpnb" Nov 24 08:47:45 crc kubenswrapper[4718]: I1124 08:47:45.316556 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46jbj\" (UniqueName: \"kubernetes.io/projected/b2c159c8-289a-4525-a23a-a6d5b2af870d-kube-api-access-46jbj\") pod \"infra-operator-index-6gpnb\" (UID: \"b2c159c8-289a-4525-a23a-a6d5b2af870d\") " pod="openstack-operators/infra-operator-index-6gpnb" Nov 24 08:47:45 crc kubenswrapper[4718]: I1124 08:47:45.399772 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-6gpnb" Nov 24 08:47:45 crc kubenswrapper[4718]: I1124 08:47:45.924731 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-6gpnb"] Nov 24 08:47:46 crc kubenswrapper[4718]: I1124 08:47:46.188177 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-6gpnb" event={"ID":"b2c159c8-289a-4525-a23a-a6d5b2af870d","Type":"ContainerStarted","Data":"01bde1401b96e2c2304713ed6f31c9acc8753c2ba3b3a3253cecd7960fcddecc"} Nov 24 08:47:47 crc kubenswrapper[4718]: I1124 08:47:47.195369 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-6gpnb" event={"ID":"b2c159c8-289a-4525-a23a-a6d5b2af870d","Type":"ContainerStarted","Data":"48944647de160122051f0e0119c42ce610089f9c96c04e2c50f0216c60d66f31"} Nov 24 08:47:47 crc kubenswrapper[4718]: I1124 08:47:47.207610 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-6gpnb" podStartSLOduration=1.206392911 podStartE2EDuration="2.207589438s" podCreationTimestamp="2025-11-24 08:47:45 +0000 UTC" firstStartedPulling="2025-11-24 08:47:45.935962496 +0000 UTC m=+738.052253390" lastFinishedPulling="2025-11-24 08:47:46.937159013 +0000 UTC m=+739.053449917" observedRunningTime="2025-11-24 08:47:47.206173153 +0000 UTC m=+739.322464057" watchObservedRunningTime="2025-11-24 08:47:47.207589438 +0000 UTC m=+739.323880342" Nov 24 08:47:48 crc kubenswrapper[4718]: I1124 08:47:48.274556 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-6gpnb"] Nov 24 08:47:48 crc kubenswrapper[4718]: I1124 08:47:48.880422 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-snq4h"] Nov 24 08:47:48 crc kubenswrapper[4718]: I1124 08:47:48.881327 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-snq4h" Nov 24 08:47:48 crc kubenswrapper[4718]: I1124 08:47:48.890157 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-snq4h"] Nov 24 08:47:48 crc kubenswrapper[4718]: I1124 08:47:48.944082 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lnrf\" (UniqueName: \"kubernetes.io/projected/420604f4-8485-4afa-a167-ca61c1c63981-kube-api-access-6lnrf\") pod \"infra-operator-index-snq4h\" (UID: \"420604f4-8485-4afa-a167-ca61c1c63981\") " pod="openstack-operators/infra-operator-index-snq4h" Nov 24 08:47:49 crc kubenswrapper[4718]: I1124 08:47:49.045270 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lnrf\" (UniqueName: \"kubernetes.io/projected/420604f4-8485-4afa-a167-ca61c1c63981-kube-api-access-6lnrf\") pod \"infra-operator-index-snq4h\" (UID: \"420604f4-8485-4afa-a167-ca61c1c63981\") " pod="openstack-operators/infra-operator-index-snq4h" Nov 24 08:47:49 crc kubenswrapper[4718]: I1124 08:47:49.073793 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lnrf\" (UniqueName: \"kubernetes.io/projected/420604f4-8485-4afa-a167-ca61c1c63981-kube-api-access-6lnrf\") pod \"infra-operator-index-snq4h\" (UID: \"420604f4-8485-4afa-a167-ca61c1c63981\") " pod="openstack-operators/infra-operator-index-snq4h" Nov 24 08:47:49 crc kubenswrapper[4718]: I1124 08:47:49.199753 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-snq4h" Nov 24 08:47:49 crc kubenswrapper[4718]: I1124 08:47:49.205462 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-6gpnb" podUID="b2c159c8-289a-4525-a23a-a6d5b2af870d" containerName="registry-server" containerID="cri-o://48944647de160122051f0e0119c42ce610089f9c96c04e2c50f0216c60d66f31" gracePeriod=2 Nov 24 08:47:49 crc kubenswrapper[4718]: I1124 08:47:49.524616 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-6gpnb" Nov 24 08:47:49 crc kubenswrapper[4718]: I1124 08:47:49.594707 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-snq4h"] Nov 24 08:47:49 crc kubenswrapper[4718]: W1124 08:47:49.599936 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod420604f4_8485_4afa_a167_ca61c1c63981.slice/crio-bc6a38072c66a8d92af27d5d4257cda5c1faa87a5e19f615fca38b19ac170270 WatchSource:0}: Error finding container bc6a38072c66a8d92af27d5d4257cda5c1faa87a5e19f615fca38b19ac170270: Status 404 returned error can't find the container with id bc6a38072c66a8d92af27d5d4257cda5c1faa87a5e19f615fca38b19ac170270 Nov 24 08:47:49 crc kubenswrapper[4718]: I1124 08:47:49.651938 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46jbj\" (UniqueName: \"kubernetes.io/projected/b2c159c8-289a-4525-a23a-a6d5b2af870d-kube-api-access-46jbj\") pod \"b2c159c8-289a-4525-a23a-a6d5b2af870d\" (UID: \"b2c159c8-289a-4525-a23a-a6d5b2af870d\") " Nov 24 08:47:49 crc kubenswrapper[4718]: I1124 08:47:49.656326 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2c159c8-289a-4525-a23a-a6d5b2af870d-kube-api-access-46jbj" (OuterVolumeSpecName: "kube-api-access-46jbj") pod "b2c159c8-289a-4525-a23a-a6d5b2af870d" (UID: "b2c159c8-289a-4525-a23a-a6d5b2af870d"). InnerVolumeSpecName "kube-api-access-46jbj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:47:49 crc kubenswrapper[4718]: I1124 08:47:49.753317 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46jbj\" (UniqueName: \"kubernetes.io/projected/b2c159c8-289a-4525-a23a-a6d5b2af870d-kube-api-access-46jbj\") on node \"crc\" DevicePath \"\"" Nov 24 08:47:50 crc kubenswrapper[4718]: I1124 08:47:50.212376 4718 generic.go:334] "Generic (PLEG): container finished" podID="b2c159c8-289a-4525-a23a-a6d5b2af870d" containerID="48944647de160122051f0e0119c42ce610089f9c96c04e2c50f0216c60d66f31" exitCode=0 Nov 24 08:47:50 crc kubenswrapper[4718]: I1124 08:47:50.212427 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-6gpnb" Nov 24 08:47:50 crc kubenswrapper[4718]: I1124 08:47:50.212444 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-6gpnb" event={"ID":"b2c159c8-289a-4525-a23a-a6d5b2af870d","Type":"ContainerDied","Data":"48944647de160122051f0e0119c42ce610089f9c96c04e2c50f0216c60d66f31"} Nov 24 08:47:50 crc kubenswrapper[4718]: I1124 08:47:50.212469 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-6gpnb" event={"ID":"b2c159c8-289a-4525-a23a-a6d5b2af870d","Type":"ContainerDied","Data":"01bde1401b96e2c2304713ed6f31c9acc8753c2ba3b3a3253cecd7960fcddecc"} Nov 24 08:47:50 crc kubenswrapper[4718]: I1124 08:47:50.212485 4718 scope.go:117] "RemoveContainer" containerID="48944647de160122051f0e0119c42ce610089f9c96c04e2c50f0216c60d66f31" Nov 24 08:47:50 crc kubenswrapper[4718]: I1124 08:47:50.214259 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-snq4h" event={"ID":"420604f4-8485-4afa-a167-ca61c1c63981","Type":"ContainerStarted","Data":"3c26a3fdeec686baabcd7e664965f29ff4119c5a93fe47afc6155d8851918791"} Nov 24 08:47:50 crc kubenswrapper[4718]: I1124 08:47:50.214298 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-snq4h" event={"ID":"420604f4-8485-4afa-a167-ca61c1c63981","Type":"ContainerStarted","Data":"bc6a38072c66a8d92af27d5d4257cda5c1faa87a5e19f615fca38b19ac170270"} Nov 24 08:47:50 crc kubenswrapper[4718]: I1124 08:47:50.225588 4718 scope.go:117] "RemoveContainer" containerID="48944647de160122051f0e0119c42ce610089f9c96c04e2c50f0216c60d66f31" Nov 24 08:47:50 crc kubenswrapper[4718]: E1124 08:47:50.225922 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48944647de160122051f0e0119c42ce610089f9c96c04e2c50f0216c60d66f31\": container with ID starting with 48944647de160122051f0e0119c42ce610089f9c96c04e2c50f0216c60d66f31 not found: ID does not exist" containerID="48944647de160122051f0e0119c42ce610089f9c96c04e2c50f0216c60d66f31" Nov 24 08:47:50 crc kubenswrapper[4718]: I1124 08:47:50.225951 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48944647de160122051f0e0119c42ce610089f9c96c04e2c50f0216c60d66f31"} err="failed to get container status \"48944647de160122051f0e0119c42ce610089f9c96c04e2c50f0216c60d66f31\": rpc error: code = NotFound desc = could not find container \"48944647de160122051f0e0119c42ce610089f9c96c04e2c50f0216c60d66f31\": container with ID starting with 48944647de160122051f0e0119c42ce610089f9c96c04e2c50f0216c60d66f31 not found: ID does not exist" Nov 24 08:47:50 crc kubenswrapper[4718]: I1124 08:47:50.234509 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-snq4h" podStartSLOduration=1.817328336 podStartE2EDuration="2.234492634s" podCreationTimestamp="2025-11-24 08:47:48 +0000 UTC" firstStartedPulling="2025-11-24 08:47:49.601906763 +0000 UTC m=+741.718197667" lastFinishedPulling="2025-11-24 08:47:50.019071061 +0000 UTC m=+742.135361965" observedRunningTime="2025-11-24 08:47:50.231039828 +0000 UTC m=+742.347330742" watchObservedRunningTime="2025-11-24 08:47:50.234492634 +0000 UTC m=+742.350783538" Nov 24 08:47:50 crc kubenswrapper[4718]: I1124 08:47:50.245450 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-operators/infra-operator-index-6gpnb"] Nov 24 08:47:50 crc kubenswrapper[4718]: I1124 08:47:50.248683 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-6gpnb"] Nov 24 08:47:50 crc kubenswrapper[4718]: I1124 08:47:50.603350 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2c159c8-289a-4525-a23a-a6d5b2af870d" path="/var/lib/kubelet/pods/b2c159c8-289a-4525-a23a-a6d5b2af870d/volumes" Nov 24 08:47:52 crc kubenswrapper[4718]: I1124 08:47:52.044872 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:47:52 crc kubenswrapper[4718]: I1124 08:47:52.044941 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.396090 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw"] Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.396820 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" podUID="e91241fe-8061-4ab3-ac7a-5f3d58e01d5d" containerName="route-controller-manager" containerID="cri-o://c38e2e87fe4f684eab9d0a97d608f6388e9d6790118723eaabb0adfeea17f6b7" gracePeriod=30 Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.399324 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-n9vxz"] Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.399541 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" podUID="8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9" containerName="controller-manager" containerID="cri-o://03bd91ec750414e9c35275150682858ee2418aaf4bffa1214852e9a225939a71" gracePeriod=30 Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.807654 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.811864 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.871844 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-serving-cert\") pod \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.872234 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-config\") pod \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.872281 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-config\") pod \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\" (UID: \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\") " Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.872384 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-serving-cert\") pod \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\" (UID: \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\") " Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.872415 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8qqcl\" (UniqueName: \"kubernetes.io/projected/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-kube-api-access-8qqcl\") pod \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.872437 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-proxy-ca-bundles\") pod \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.872474 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fs6zk\" (UniqueName: \"kubernetes.io/projected/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-kube-api-access-fs6zk\") pod \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\" (UID: \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\") " Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.872514 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-client-ca\") pod \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\" (UID: \"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d\") " Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.872543 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-client-ca\") pod \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\" (UID: \"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9\") " Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.873470 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-client-ca" (OuterVolumeSpecName: "client-ca") pod "8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9" 
(UID: "8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.874140 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9" (UID: "8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.874350 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-config" (OuterVolumeSpecName: "config") pod "8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9" (UID: "8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.874492 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-config" (OuterVolumeSpecName: "config") pod "e91241fe-8061-4ab3-ac7a-5f3d58e01d5d" (UID: "e91241fe-8061-4ab3-ac7a-5f3d58e01d5d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.875032 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-client-ca" (OuterVolumeSpecName: "client-ca") pod "e91241fe-8061-4ab3-ac7a-5f3d58e01d5d" (UID: "e91241fe-8061-4ab3-ac7a-5f3d58e01d5d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.879709 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9" (UID: "8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.880297 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-kube-api-access-fs6zk" (OuterVolumeSpecName: "kube-api-access-fs6zk") pod "e91241fe-8061-4ab3-ac7a-5f3d58e01d5d" (UID: "e91241fe-8061-4ab3-ac7a-5f3d58e01d5d"). InnerVolumeSpecName "kube-api-access-fs6zk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.880458 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e91241fe-8061-4ab3-ac7a-5f3d58e01d5d" (UID: "e91241fe-8061-4ab3-ac7a-5f3d58e01d5d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.886271 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-kube-api-access-8qqcl" (OuterVolumeSpecName: "kube-api-access-8qqcl") pod "8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9" (UID: "8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9"). 
InnerVolumeSpecName "kube-api-access-8qqcl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.974328 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.974365 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8qqcl\" (UniqueName: \"kubernetes.io/projected/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-kube-api-access-8qqcl\") on node \"crc\" DevicePath \"\"" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.974377 4718 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.974390 4718 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-client-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.974401 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fs6zk\" (UniqueName: \"kubernetes.io/projected/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-kube-api-access-fs6zk\") on node \"crc\" DevicePath \"\"" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.974412 4718 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-client-ca\") on node \"crc\" DevicePath \"\"" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.974424 4718 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.974435 4718 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:47:58 crc kubenswrapper[4718]: I1124 08:47:58.974442 4718 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.200723 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/infra-operator-index-snq4h" Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.200765 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-index-snq4h" Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.234802 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/infra-operator-index-snq4h" Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.273407 4718 generic.go:334] "Generic (PLEG): container finished" podID="e91241fe-8061-4ab3-ac7a-5f3d58e01d5d" containerID="c38e2e87fe4f684eab9d0a97d608f6388e9d6790118723eaabb0adfeea17f6b7" exitCode=0 Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.273468 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" 
event={"ID":"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d","Type":"ContainerDied","Data":"c38e2e87fe4f684eab9d0a97d608f6388e9d6790118723eaabb0adfeea17f6b7"} Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.273492 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" event={"ID":"e91241fe-8061-4ab3-ac7a-5f3d58e01d5d","Type":"ContainerDied","Data":"31052415ddcc508782b1d034f07bbdbf1abb453f286df82d905bbe663a15d2c1"} Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.273510 4718 scope.go:117] "RemoveContainer" containerID="c38e2e87fe4f684eab9d0a97d608f6388e9d6790118723eaabb0adfeea17f6b7" Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.273619 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw" Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.275615 4718 generic.go:334] "Generic (PLEG): container finished" podID="8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9" containerID="03bd91ec750414e9c35275150682858ee2418aaf4bffa1214852e9a225939a71" exitCode=0 Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.276246 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.276319 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" event={"ID":"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9","Type":"ContainerDied","Data":"03bd91ec750414e9c35275150682858ee2418aaf4bffa1214852e9a225939a71"} Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.276392 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-n9vxz" event={"ID":"8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9","Type":"ContainerDied","Data":"8db2fe92bd6f0412853efebf74269e9cfe34996051bef28b80506052c8535d3a"} Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.300089 4718 scope.go:117] "RemoveContainer" containerID="c38e2e87fe4f684eab9d0a97d608f6388e9d6790118723eaabb0adfeea17f6b7" Nov 24 08:47:59 crc kubenswrapper[4718]: E1124 08:47:59.300566 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c38e2e87fe4f684eab9d0a97d608f6388e9d6790118723eaabb0adfeea17f6b7\": container with ID starting with c38e2e87fe4f684eab9d0a97d608f6388e9d6790118723eaabb0adfeea17f6b7 not found: ID does not exist" containerID="c38e2e87fe4f684eab9d0a97d608f6388e9d6790118723eaabb0adfeea17f6b7" Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.300614 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c38e2e87fe4f684eab9d0a97d608f6388e9d6790118723eaabb0adfeea17f6b7"} err="failed to get container status \"c38e2e87fe4f684eab9d0a97d608f6388e9d6790118723eaabb0adfeea17f6b7\": rpc error: code = NotFound desc = could not find container \"c38e2e87fe4f684eab9d0a97d608f6388e9d6790118723eaabb0adfeea17f6b7\": container with ID starting with c38e2e87fe4f684eab9d0a97d608f6388e9d6790118723eaabb0adfeea17f6b7 not found: ID does not exist" Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.300641 4718 scope.go:117] "RemoveContainer" containerID="03bd91ec750414e9c35275150682858ee2418aaf4bffa1214852e9a225939a71" Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.307750 4718 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-n9vxz"] Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.312525 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-n9vxz"] Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.313798 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-index-snq4h" Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.320175 4718 scope.go:117] "RemoveContainer" containerID="03bd91ec750414e9c35275150682858ee2418aaf4bffa1214852e9a225939a71" Nov 24 08:47:59 crc kubenswrapper[4718]: E1124 08:47:59.320812 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03bd91ec750414e9c35275150682858ee2418aaf4bffa1214852e9a225939a71\": container with ID starting with 03bd91ec750414e9c35275150682858ee2418aaf4bffa1214852e9a225939a71 not found: ID does not exist" containerID="03bd91ec750414e9c35275150682858ee2418aaf4bffa1214852e9a225939a71" Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.320841 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03bd91ec750414e9c35275150682858ee2418aaf4bffa1214852e9a225939a71"} err="failed to get container status \"03bd91ec750414e9c35275150682858ee2418aaf4bffa1214852e9a225939a71\": rpc error: code = NotFound desc = could not find container \"03bd91ec750414e9c35275150682858ee2418aaf4bffa1214852e9a225939a71\": container with ID starting with 03bd91ec750414e9c35275150682858ee2418aaf4bffa1214852e9a225939a71 not found: ID does not exist" Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.323864 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw"] Nov 24 08:47:59 crc kubenswrapper[4718]: I1124 08:47:59.328201 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nmrnw"] Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.206572 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5db44f4488-nzpz2"] Nov 24 08:48:00 crc kubenswrapper[4718]: E1124 08:48:00.206888 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9" containerName="controller-manager" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.206910 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9" containerName="controller-manager" Nov 24 08:48:00 crc kubenswrapper[4718]: E1124 08:48:00.206923 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e91241fe-8061-4ab3-ac7a-5f3d58e01d5d" containerName="route-controller-manager" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.206929 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="e91241fe-8061-4ab3-ac7a-5f3d58e01d5d" containerName="route-controller-manager" Nov 24 08:48:00 crc kubenswrapper[4718]: E1124 08:48:00.206940 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2c159c8-289a-4525-a23a-a6d5b2af870d" containerName="registry-server" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.206953 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2c159c8-289a-4525-a23a-a6d5b2af870d" containerName="registry-server" Nov 24 08:48:00 crc 
kubenswrapper[4718]: I1124 08:48:00.213217 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="e91241fe-8061-4ab3-ac7a-5f3d58e01d5d" containerName="route-controller-manager" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.213283 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9" containerName="controller-manager" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.213310 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2c159c8-289a-4525-a23a-a6d5b2af870d" containerName="registry-server" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.213908 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn"] Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.215432 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.215776 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.218923 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.220594 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.220849 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.220871 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.221458 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.221516 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.221754 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.222036 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.222733 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.222749 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.223011 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.223043 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.233515 4718 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.237547 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn"] Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.242014 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5db44f4488-nzpz2"] Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.289888 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87d88\" (UniqueName: \"kubernetes.io/projected/cf845a2a-70d3-4ecd-b231-e38647dbf102-kube-api-access-87d88\") pod \"controller-manager-5db44f4488-nzpz2\" (UID: \"cf845a2a-70d3-4ecd-b231-e38647dbf102\") " pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.289963 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9cc48cc9-d3d1-4661-bb57-5d012bf633ca-serving-cert\") pod \"route-controller-manager-6f6c89966f-5zgvn\" (UID: \"9cc48cc9-d3d1-4661-bb57-5d012bf633ca\") " pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.290021 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzklw\" (UniqueName: \"kubernetes.io/projected/9cc48cc9-d3d1-4661-bb57-5d012bf633ca-kube-api-access-tzklw\") pod \"route-controller-manager-6f6c89966f-5zgvn\" (UID: \"9cc48cc9-d3d1-4661-bb57-5d012bf633ca\") " pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.290073 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf845a2a-70d3-4ecd-b231-e38647dbf102-config\") pod \"controller-manager-5db44f4488-nzpz2\" (UID: \"cf845a2a-70d3-4ecd-b231-e38647dbf102\") " pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.290113 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cf845a2a-70d3-4ecd-b231-e38647dbf102-client-ca\") pod \"controller-manager-5db44f4488-nzpz2\" (UID: \"cf845a2a-70d3-4ecd-b231-e38647dbf102\") " pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.290151 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cc48cc9-d3d1-4661-bb57-5d012bf633ca-config\") pod \"route-controller-manager-6f6c89966f-5zgvn\" (UID: \"9cc48cc9-d3d1-4661-bb57-5d012bf633ca\") " pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.290175 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9cc48cc9-d3d1-4661-bb57-5d012bf633ca-client-ca\") pod \"route-controller-manager-6f6c89966f-5zgvn\" (UID: \"9cc48cc9-d3d1-4661-bb57-5d012bf633ca\") " 
pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.290361 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cf845a2a-70d3-4ecd-b231-e38647dbf102-proxy-ca-bundles\") pod \"controller-manager-5db44f4488-nzpz2\" (UID: \"cf845a2a-70d3-4ecd-b231-e38647dbf102\") " pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.290477 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf845a2a-70d3-4ecd-b231-e38647dbf102-serving-cert\") pod \"controller-manager-5db44f4488-nzpz2\" (UID: \"cf845a2a-70d3-4ecd-b231-e38647dbf102\") " pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.391532 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9cc48cc9-d3d1-4661-bb57-5d012bf633ca-client-ca\") pod \"route-controller-manager-6f6c89966f-5zgvn\" (UID: \"9cc48cc9-d3d1-4661-bb57-5d012bf633ca\") " pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.391611 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cf845a2a-70d3-4ecd-b231-e38647dbf102-proxy-ca-bundles\") pod \"controller-manager-5db44f4488-nzpz2\" (UID: \"cf845a2a-70d3-4ecd-b231-e38647dbf102\") " pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.391657 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf845a2a-70d3-4ecd-b231-e38647dbf102-serving-cert\") pod \"controller-manager-5db44f4488-nzpz2\" (UID: \"cf845a2a-70d3-4ecd-b231-e38647dbf102\") " pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.391697 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87d88\" (UniqueName: \"kubernetes.io/projected/cf845a2a-70d3-4ecd-b231-e38647dbf102-kube-api-access-87d88\") pod \"controller-manager-5db44f4488-nzpz2\" (UID: \"cf845a2a-70d3-4ecd-b231-e38647dbf102\") " pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.391722 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9cc48cc9-d3d1-4661-bb57-5d012bf633ca-serving-cert\") pod \"route-controller-manager-6f6c89966f-5zgvn\" (UID: \"9cc48cc9-d3d1-4661-bb57-5d012bf633ca\") " pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.391749 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzklw\" (UniqueName: \"kubernetes.io/projected/9cc48cc9-d3d1-4661-bb57-5d012bf633ca-kube-api-access-tzklw\") pod \"route-controller-manager-6f6c89966f-5zgvn\" (UID: \"9cc48cc9-d3d1-4661-bb57-5d012bf633ca\") " 
pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.391792 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf845a2a-70d3-4ecd-b231-e38647dbf102-config\") pod \"controller-manager-5db44f4488-nzpz2\" (UID: \"cf845a2a-70d3-4ecd-b231-e38647dbf102\") " pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.391828 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cf845a2a-70d3-4ecd-b231-e38647dbf102-client-ca\") pod \"controller-manager-5db44f4488-nzpz2\" (UID: \"cf845a2a-70d3-4ecd-b231-e38647dbf102\") " pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.391864 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cc48cc9-d3d1-4661-bb57-5d012bf633ca-config\") pod \"route-controller-manager-6f6c89966f-5zgvn\" (UID: \"9cc48cc9-d3d1-4661-bb57-5d012bf633ca\") " pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.393183 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cc48cc9-d3d1-4661-bb57-5d012bf633ca-config\") pod \"route-controller-manager-6f6c89966f-5zgvn\" (UID: \"9cc48cc9-d3d1-4661-bb57-5d012bf633ca\") " pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.393201 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cf845a2a-70d3-4ecd-b231-e38647dbf102-client-ca\") pod \"controller-manager-5db44f4488-nzpz2\" (UID: \"cf845a2a-70d3-4ecd-b231-e38647dbf102\") " pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.393320 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf845a2a-70d3-4ecd-b231-e38647dbf102-config\") pod \"controller-manager-5db44f4488-nzpz2\" (UID: \"cf845a2a-70d3-4ecd-b231-e38647dbf102\") " pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.393355 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cf845a2a-70d3-4ecd-b231-e38647dbf102-proxy-ca-bundles\") pod \"controller-manager-5db44f4488-nzpz2\" (UID: \"cf845a2a-70d3-4ecd-b231-e38647dbf102\") " pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.393384 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9cc48cc9-d3d1-4661-bb57-5d012bf633ca-client-ca\") pod \"route-controller-manager-6f6c89966f-5zgvn\" (UID: \"9cc48cc9-d3d1-4661-bb57-5d012bf633ca\") " pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.396083 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/cf845a2a-70d3-4ecd-b231-e38647dbf102-serving-cert\") pod \"controller-manager-5db44f4488-nzpz2\" (UID: \"cf845a2a-70d3-4ecd-b231-e38647dbf102\") " pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.396151 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9cc48cc9-d3d1-4661-bb57-5d012bf633ca-serving-cert\") pod \"route-controller-manager-6f6c89966f-5zgvn\" (UID: \"9cc48cc9-d3d1-4661-bb57-5d012bf633ca\") " pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.413531 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87d88\" (UniqueName: \"kubernetes.io/projected/cf845a2a-70d3-4ecd-b231-e38647dbf102-kube-api-access-87d88\") pod \"controller-manager-5db44f4488-nzpz2\" (UID: \"cf845a2a-70d3-4ecd-b231-e38647dbf102\") " pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.415177 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzklw\" (UniqueName: \"kubernetes.io/projected/9cc48cc9-d3d1-4661-bb57-5d012bf633ca-kube-api-access-tzklw\") pod \"route-controller-manager-6f6c89966f-5zgvn\" (UID: \"9cc48cc9-d3d1-4661-bb57-5d012bf633ca\") " pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.542673 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.558256 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.604330 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9" path="/var/lib/kubelet/pods/8fc804cb-ad5b-4f6a-a0ca-1a152a7eede9/volumes" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.604990 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e91241fe-8061-4ab3-ac7a-5f3d58e01d5d" path="/var/lib/kubelet/pods/e91241fe-8061-4ab3-ac7a-5f3d58e01d5d/volumes" Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.818184 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn"] Nov 24 08:48:00 crc kubenswrapper[4718]: W1124 08:48:00.825273 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9cc48cc9_d3d1_4661_bb57_5d012bf633ca.slice/crio-53947e9d9b4132661caeda01b61df654a9e8235904256f14daf4ca318ea1c581 WatchSource:0}: Error finding container 53947e9d9b4132661caeda01b61df654a9e8235904256f14daf4ca318ea1c581: Status 404 returned error can't find the container with id 53947e9d9b4132661caeda01b61df654a9e8235904256f14daf4ca318ea1c581 Nov 24 08:48:00 crc kubenswrapper[4718]: I1124 08:48:00.962685 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5db44f4488-nzpz2"] Nov 24 08:48:00 crc kubenswrapper[4718]: W1124 08:48:00.968161 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf845a2a_70d3_4ecd_b231_e38647dbf102.slice/crio-85c5b0ebeb8c6b87cf0fbbbfa7cd8bd76b17abd0add116024999e11d027e48ae WatchSource:0}: Error finding container 85c5b0ebeb8c6b87cf0fbbbfa7cd8bd76b17abd0add116024999e11d027e48ae: Status 404 returned error can't find the container with id 85c5b0ebeb8c6b87cf0fbbbfa7cd8bd76b17abd0add116024999e11d027e48ae Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.293503 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" event={"ID":"cf845a2a-70d3-4ecd-b231-e38647dbf102","Type":"ContainerStarted","Data":"14ad24883c22feb9e3c29afde24a47bdf00910bf889a8a28126fc977103afff0"} Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.293779 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" event={"ID":"cf845a2a-70d3-4ecd-b231-e38647dbf102","Type":"ContainerStarted","Data":"85c5b0ebeb8c6b87cf0fbbbfa7cd8bd76b17abd0add116024999e11d027e48ae"} Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.294929 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.297629 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" event={"ID":"9cc48cc9-d3d1-4661-bb57-5d012bf633ca","Type":"ContainerStarted","Data":"042915a04bf2339455950b6d0f215cf935f547b08ccdffb9945b814a8bc492b6"} Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.297675 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" 
event={"ID":"9cc48cc9-d3d1-4661-bb57-5d012bf633ca","Type":"ContainerStarted","Data":"53947e9d9b4132661caeda01b61df654a9e8235904256f14daf4ca318ea1c581"} Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.298336 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.308489 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.355772 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" podStartSLOduration=3.355758474 podStartE2EDuration="3.355758474s" podCreationTimestamp="2025-11-24 08:47:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:48:01.353552889 +0000 UTC m=+753.469843793" watchObservedRunningTime="2025-11-24 08:48:01.355758474 +0000 UTC m=+753.472049378" Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.356685 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5db44f4488-nzpz2" podStartSLOduration=3.356680516 podStartE2EDuration="3.356680516s" podCreationTimestamp="2025-11-24 08:47:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:48:01.332731124 +0000 UTC m=+753.449022028" watchObservedRunningTime="2025-11-24 08:48:01.356680516 +0000 UTC m=+753.472971410" Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.441874 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6f6c89966f-5zgvn" Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.719018 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb"] Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.720092 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.721747 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-fq6vw" Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.732186 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb"] Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.808185 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-util\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb\" (UID: \"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.808370 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-bundle\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb\" (UID: \"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.808469 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9hs4\" (UniqueName: \"kubernetes.io/projected/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-kube-api-access-f9hs4\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb\" (UID: \"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.910014 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9hs4\" (UniqueName: \"kubernetes.io/projected/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-kube-api-access-f9hs4\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb\" (UID: \"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.910078 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-util\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb\" (UID: \"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.910138 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-bundle\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb\" (UID: \"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.910554 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-bundle\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb\" (UID: \"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.910785 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-util\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb\" (UID: \"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" Nov 24 08:48:01 crc kubenswrapper[4718]: I1124 08:48:01.928901 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9hs4\" (UniqueName: \"kubernetes.io/projected/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-kube-api-access-f9hs4\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb\" (UID: \"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" Nov 24 08:48:02 crc kubenswrapper[4718]: I1124 08:48:02.034373 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" Nov 24 08:48:02 crc kubenswrapper[4718]: I1124 08:48:02.425215 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb"] Nov 24 08:48:02 crc kubenswrapper[4718]: W1124 08:48:02.439071 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c50fe6a_f5ce_4cc1_8044_a378ec1b740b.slice/crio-529e5cb6c4a936b0b8c535f399b0cb6f206a82d667a3d24da89c12f9a72c9fe4 WatchSource:0}: Error finding container 529e5cb6c4a936b0b8c535f399b0cb6f206a82d667a3d24da89c12f9a72c9fe4: Status 404 returned error can't find the container with id 529e5cb6c4a936b0b8c535f399b0cb6f206a82d667a3d24da89c12f9a72c9fe4 Nov 24 08:48:03 crc kubenswrapper[4718]: I1124 08:48:03.309403 4718 generic.go:334] "Generic (PLEG): container finished" podID="4c50fe6a-f5ce-4cc1-8044-a378ec1b740b" containerID="c488c6e17a991e26d6e46b56390c27e156025b5fec901bd0adaa0640bceb647c" exitCode=0 Nov 24 08:48:03 crc kubenswrapper[4718]: I1124 08:48:03.309460 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" event={"ID":"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b","Type":"ContainerDied","Data":"c488c6e17a991e26d6e46b56390c27e156025b5fec901bd0adaa0640bceb647c"} Nov 24 08:48:03 crc kubenswrapper[4718]: I1124 08:48:03.309736 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" event={"ID":"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b","Type":"ContainerStarted","Data":"529e5cb6c4a936b0b8c535f399b0cb6f206a82d667a3d24da89c12f9a72c9fe4"} Nov 24 08:48:04 crc kubenswrapper[4718]: I1124 08:48:04.315591 4718 generic.go:334] "Generic (PLEG): container finished" podID="4c50fe6a-f5ce-4cc1-8044-a378ec1b740b" containerID="d6505d5048a791bbd005e2f487ea11c4fa137c914a50bd41948f29de65e495d2" exitCode=0 Nov 24 08:48:04 crc kubenswrapper[4718]: I1124 08:48:04.315660 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" event={"ID":"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b","Type":"ContainerDied","Data":"d6505d5048a791bbd005e2f487ea11c4fa137c914a50bd41948f29de65e495d2"} Nov 24 08:48:04 crc kubenswrapper[4718]: I1124 08:48:04.571951 4718 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 24 08:48:05 crc kubenswrapper[4718]: I1124 08:48:05.324997 4718 generic.go:334] "Generic (PLEG): container finished" podID="4c50fe6a-f5ce-4cc1-8044-a378ec1b740b" containerID="cab5bd6053f147c5365ebf76a4658f47aaa702d7beea2c41fbae0605fabc3b8b" exitCode=0 Nov 24 08:48:05 crc kubenswrapper[4718]: I1124 08:48:05.325039 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" event={"ID":"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b","Type":"ContainerDied","Data":"cab5bd6053f147c5365ebf76a4658f47aaa702d7beea2c41fbae0605fabc3b8b"} Nov 24 08:48:06 crc kubenswrapper[4718]: I1124 08:48:06.715255 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" Nov 24 08:48:06 crc kubenswrapper[4718]: I1124 08:48:06.807365 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-util\") pod \"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b\" (UID: \"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b\") " Nov 24 08:48:06 crc kubenswrapper[4718]: I1124 08:48:06.807721 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f9hs4\" (UniqueName: \"kubernetes.io/projected/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-kube-api-access-f9hs4\") pod \"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b\" (UID: \"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b\") " Nov 24 08:48:06 crc kubenswrapper[4718]: I1124 08:48:06.807772 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-bundle\") pod \"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b\" (UID: \"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b\") " Nov 24 08:48:06 crc kubenswrapper[4718]: I1124 08:48:06.809570 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-bundle" (OuterVolumeSpecName: "bundle") pod "4c50fe6a-f5ce-4cc1-8044-a378ec1b740b" (UID: "4c50fe6a-f5ce-4cc1-8044-a378ec1b740b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:48:06 crc kubenswrapper[4718]: I1124 08:48:06.813457 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-kube-api-access-f9hs4" (OuterVolumeSpecName: "kube-api-access-f9hs4") pod "4c50fe6a-f5ce-4cc1-8044-a378ec1b740b" (UID: "4c50fe6a-f5ce-4cc1-8044-a378ec1b740b"). InnerVolumeSpecName "kube-api-access-f9hs4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:48:06 crc kubenswrapper[4718]: I1124 08:48:06.828088 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-util" (OuterVolumeSpecName: "util") pod "4c50fe6a-f5ce-4cc1-8044-a378ec1b740b" (UID: "4c50fe6a-f5ce-4cc1-8044-a378ec1b740b"). 
InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:48:06 crc kubenswrapper[4718]: I1124 08:48:06.909623 4718 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:48:06 crc kubenswrapper[4718]: I1124 08:48:06.909659 4718 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-util\") on node \"crc\" DevicePath \"\"" Nov 24 08:48:06 crc kubenswrapper[4718]: I1124 08:48:06.909669 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f9hs4\" (UniqueName: \"kubernetes.io/projected/4c50fe6a-f5ce-4cc1-8044-a378ec1b740b-kube-api-access-f9hs4\") on node \"crc\" DevicePath \"\"" Nov 24 08:48:07 crc kubenswrapper[4718]: I1124 08:48:07.339000 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" event={"ID":"4c50fe6a-f5ce-4cc1-8044-a378ec1b740b","Type":"ContainerDied","Data":"529e5cb6c4a936b0b8c535f399b0cb6f206a82d667a3d24da89c12f9a72c9fe4"} Nov 24 08:48:07 crc kubenswrapper[4718]: I1124 08:48:07.339043 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb" Nov 24 08:48:07 crc kubenswrapper[4718]: I1124 08:48:07.339056 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="529e5cb6c4a936b0b8c535f399b0cb6f206a82d667a3d24da89c12f9a72c9fe4" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.434814 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h"] Nov 24 08:48:14 crc kubenswrapper[4718]: E1124 08:48:14.435681 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c50fe6a-f5ce-4cc1-8044-a378ec1b740b" containerName="extract" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.435698 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c50fe6a-f5ce-4cc1-8044-a378ec1b740b" containerName="extract" Nov 24 08:48:14 crc kubenswrapper[4718]: E1124 08:48:14.435715 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c50fe6a-f5ce-4cc1-8044-a378ec1b740b" containerName="util" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.435723 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c50fe6a-f5ce-4cc1-8044-a378ec1b740b" containerName="util" Nov 24 08:48:14 crc kubenswrapper[4718]: E1124 08:48:14.435738 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c50fe6a-f5ce-4cc1-8044-a378ec1b740b" containerName="pull" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.435746 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c50fe6a-f5ce-4cc1-8044-a378ec1b740b" containerName="pull" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.435871 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c50fe6a-f5ce-4cc1-8044-a378ec1b740b" containerName="extract" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.436665 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.445724 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-service-cert" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.445728 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-kxzns" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.457267 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h"] Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.610779 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/84f2adaf-ef3d-45a9-b471-51b99a01773b-webhook-cert\") pod \"infra-operator-controller-manager-5f64d8d556-c775h\" (UID: \"84f2adaf-ef3d-45a9-b471-51b99a01773b\") " pod="openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.610830 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rv59\" (UniqueName: \"kubernetes.io/projected/84f2adaf-ef3d-45a9-b471-51b99a01773b-kube-api-access-8rv59\") pod \"infra-operator-controller-manager-5f64d8d556-c775h\" (UID: \"84f2adaf-ef3d-45a9-b471-51b99a01773b\") " pod="openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.610872 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/84f2adaf-ef3d-45a9-b471-51b99a01773b-apiservice-cert\") pod \"infra-operator-controller-manager-5f64d8d556-c775h\" (UID: \"84f2adaf-ef3d-45a9-b471-51b99a01773b\") " pod="openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.712246 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/84f2adaf-ef3d-45a9-b471-51b99a01773b-webhook-cert\") pod \"infra-operator-controller-manager-5f64d8d556-c775h\" (UID: \"84f2adaf-ef3d-45a9-b471-51b99a01773b\") " pod="openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.712319 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rv59\" (UniqueName: \"kubernetes.io/projected/84f2adaf-ef3d-45a9-b471-51b99a01773b-kube-api-access-8rv59\") pod \"infra-operator-controller-manager-5f64d8d556-c775h\" (UID: \"84f2adaf-ef3d-45a9-b471-51b99a01773b\") " pod="openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.712383 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/84f2adaf-ef3d-45a9-b471-51b99a01773b-apiservice-cert\") pod \"infra-operator-controller-manager-5f64d8d556-c775h\" (UID: \"84f2adaf-ef3d-45a9-b471-51b99a01773b\") " pod="openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.717302 4718 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/84f2adaf-ef3d-45a9-b471-51b99a01773b-apiservice-cert\") pod \"infra-operator-controller-manager-5f64d8d556-c775h\" (UID: \"84f2adaf-ef3d-45a9-b471-51b99a01773b\") " pod="openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.717421 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/84f2adaf-ef3d-45a9-b471-51b99a01773b-webhook-cert\") pod \"infra-operator-controller-manager-5f64d8d556-c775h\" (UID: \"84f2adaf-ef3d-45a9-b471-51b99a01773b\") " pod="openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.731906 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rv59\" (UniqueName: \"kubernetes.io/projected/84f2adaf-ef3d-45a9-b471-51b99a01773b-kube-api-access-8rv59\") pod \"infra-operator-controller-manager-5f64d8d556-c775h\" (UID: \"84f2adaf-ef3d-45a9-b471-51b99a01773b\") " pod="openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h" Nov 24 08:48:14 crc kubenswrapper[4718]: I1124 08:48:14.756029 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h" Nov 24 08:48:15 crc kubenswrapper[4718]: I1124 08:48:15.219157 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h"] Nov 24 08:48:15 crc kubenswrapper[4718]: W1124 08:48:15.227519 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84f2adaf_ef3d_45a9_b471_51b99a01773b.slice/crio-2222d5837e054704a2b1e8961b8151827ceb0efebd25ef7854dbea59c3b1d28e WatchSource:0}: Error finding container 2222d5837e054704a2b1e8961b8151827ceb0efebd25ef7854dbea59c3b1d28e: Status 404 returned error can't find the container with id 2222d5837e054704a2b1e8961b8151827ceb0efebd25ef7854dbea59c3b1d28e Nov 24 08:48:15 crc kubenswrapper[4718]: I1124 08:48:15.398266 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h" event={"ID":"84f2adaf-ef3d-45a9-b471-51b99a01773b","Type":"ContainerStarted","Data":"2222d5837e054704a2b1e8961b8151827ceb0efebd25ef7854dbea59c3b1d28e"} Nov 24 08:48:17 crc kubenswrapper[4718]: I1124 08:48:17.413016 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h" event={"ID":"84f2adaf-ef3d-45a9-b471-51b99a01773b","Type":"ContainerStarted","Data":"290bd7f0c4fa82f5a1f6448bf9cee05329f7cc599b7546cf83c44838f80bf364"} Nov 24 08:48:17 crc kubenswrapper[4718]: I1124 08:48:17.413331 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h" event={"ID":"84f2adaf-ef3d-45a9-b471-51b99a01773b","Type":"ContainerStarted","Data":"8d13bb3b768610c3c6920a54c7b8240879070d85880a4cf17f3e2681b61a994d"} Nov 24 08:48:17 crc kubenswrapper[4718]: I1124 08:48:17.414773 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h" Nov 24 08:48:17 crc kubenswrapper[4718]: I1124 08:48:17.432821 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h" podStartSLOduration=1.778367453 podStartE2EDuration="3.432806083s" podCreationTimestamp="2025-11-24 08:48:14 +0000 UTC" firstStartedPulling="2025-11-24 08:48:15.230929581 +0000 UTC m=+767.347220485" lastFinishedPulling="2025-11-24 08:48:16.885368211 +0000 UTC m=+769.001659115" observedRunningTime="2025-11-24 08:48:17.432001693 +0000 UTC m=+769.548292597" watchObservedRunningTime="2025-11-24 08:48:17.432806083 +0000 UTC m=+769.549096987" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.502864 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/openstack-galera-0"] Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.504179 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.507067 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"kube-root-ca.crt" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.507322 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-scripts" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.507569 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openshift-service-ca.crt" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.507786 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"galera-openstack-dockercfg-fcgsq" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.513319 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/openstack-galera-1"] Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.514543 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.515594 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-config-data" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.520604 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/openstack-galera-2"] Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.522864 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.525599 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-0"] Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.530320 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-2"] Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.534999 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-1"] Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668304 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e129bbf2-f4d7-42c1-84b3-80338a6fafd4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668366 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668390 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2774aeea-619b-4e69-9927-95b17dcc9704-config-data-default\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668417 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hcr2\" (UniqueName: \"kubernetes.io/projected/e129bbf2-f4d7-42c1-84b3-80338a6fafd4-kube-api-access-5hcr2\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668448 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2774aeea-619b-4e69-9927-95b17dcc9704-operator-scripts\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668485 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e129bbf2-f4d7-42c1-84b3-80338a6fafd4-config-data-default\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668504 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3e87139f-12e2-45d7-8401-ae56813c9829-kolla-config\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668525 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/e129bbf2-f4d7-42c1-84b3-80338a6fafd4-kolla-config\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668547 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e129bbf2-f4d7-42c1-84b3-80338a6fafd4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668571 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2774aeea-619b-4e69-9927-95b17dcc9704-config-data-generated\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668597 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khv6x\" (UniqueName: \"kubernetes.io/projected/3e87139f-12e2-45d7-8401-ae56813c9829-kube-api-access-khv6x\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668621 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e87139f-12e2-45d7-8401-ae56813c9829-operator-scripts\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668653 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3e87139f-12e2-45d7-8401-ae56813c9829-config-data-default\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668681 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2774aeea-619b-4e69-9927-95b17dcc9704-kolla-config\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668698 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668713 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668733 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3e87139f-12e2-45d7-8401-ae56813c9829-config-data-generated\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.668753 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6gg8\" (UniqueName: \"kubernetes.io/projected/2774aeea-619b-4e69-9927-95b17dcc9704-kube-api-access-k6gg8\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.769655 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770032 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3e87139f-12e2-45d7-8401-ae56813c9829-config-data-generated\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770052 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6gg8\" (UniqueName: \"kubernetes.io/projected/2774aeea-619b-4e69-9927-95b17dcc9704-kube-api-access-k6gg8\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.769982 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") device mount path \"/mnt/openstack/pv11\"" pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770354 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e129bbf2-f4d7-42c1-84b3-80338a6fafd4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770436 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3e87139f-12e2-45d7-8401-ae56813c9829-config-data-generated\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770440 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770469 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/2774aeea-619b-4e69-9927-95b17dcc9704-config-data-default\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770520 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hcr2\" (UniqueName: \"kubernetes.io/projected/e129bbf2-f4d7-42c1-84b3-80338a6fafd4-kube-api-access-5hcr2\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770566 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2774aeea-619b-4e69-9927-95b17dcc9704-operator-scripts\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770647 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e129bbf2-f4d7-42c1-84b3-80338a6fafd4-config-data-default\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770676 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3e87139f-12e2-45d7-8401-ae56813c9829-kolla-config\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770694 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e129bbf2-f4d7-42c1-84b3-80338a6fafd4-kolla-config\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770723 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e129bbf2-f4d7-42c1-84b3-80338a6fafd4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770740 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2774aeea-619b-4e69-9927-95b17dcc9704-config-data-generated\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770782 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khv6x\" (UniqueName: \"kubernetes.io/projected/3e87139f-12e2-45d7-8401-ae56813c9829-kube-api-access-khv6x\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770803 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e87139f-12e2-45d7-8401-ae56813c9829-operator-scripts\") 
pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770863 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3e87139f-12e2-45d7-8401-ae56813c9829-config-data-default\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770915 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2774aeea-619b-4e69-9927-95b17dcc9704-kolla-config\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770937 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.771057 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") device mount path \"/mnt/openstack/pv01\"" pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.771429 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e129bbf2-f4d7-42c1-84b3-80338a6fafd4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.771576 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e129bbf2-f4d7-42c1-84b3-80338a6fafd4-kolla-config\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.771643 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2774aeea-619b-4e69-9927-95b17dcc9704-config-data-default\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.770644 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") device mount path \"/mnt/openstack/pv03\"" pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.771847 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2774aeea-619b-4e69-9927-95b17dcc9704-config-data-generated\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 
crc kubenswrapper[4718]: I1124 08:48:18.772575 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e129bbf2-f4d7-42c1-84b3-80338a6fafd4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.772850 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3e87139f-12e2-45d7-8401-ae56813c9829-kolla-config\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.773337 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2774aeea-619b-4e69-9927-95b17dcc9704-kolla-config\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.773375 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e129bbf2-f4d7-42c1-84b3-80338a6fafd4-config-data-default\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.774479 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2774aeea-619b-4e69-9927-95b17dcc9704-operator-scripts\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.774634 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3e87139f-12e2-45d7-8401-ae56813c9829-config-data-default\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.775309 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e87139f-12e2-45d7-8401-ae56813c9829-operator-scripts\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.790047 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.791576 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.793813 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hcr2\" (UniqueName: \"kubernetes.io/projected/e129bbf2-f4d7-42c1-84b3-80338a6fafd4-kube-api-access-5hcr2\") 
pod \"openstack-galera-0\" (UID: \"e129bbf2-f4d7-42c1-84b3-80338a6fafd4\") " pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.802448 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.814930 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khv6x\" (UniqueName: \"kubernetes.io/projected/3e87139f-12e2-45d7-8401-ae56813c9829-kube-api-access-khv6x\") pod \"openstack-galera-1\" (UID: \"3e87139f-12e2-45d7-8401-ae56813c9829\") " pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.815290 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6gg8\" (UniqueName: \"kubernetes.io/projected/2774aeea-619b-4e69-9927-95b17dcc9704-kube-api-access-k6gg8\") pod \"openstack-galera-2\" (UID: \"2774aeea-619b-4e69-9927-95b17dcc9704\") " pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.826357 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.844165 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:18 crc kubenswrapper[4718]: I1124 08:48:18.852598 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:19 crc kubenswrapper[4718]: I1124 08:48:19.283341 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-2"] Nov 24 08:48:19 crc kubenswrapper[4718]: I1124 08:48:19.286577 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-1"] Nov 24 08:48:19 crc kubenswrapper[4718]: W1124 08:48:19.289599 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2774aeea_619b_4e69_9927_95b17dcc9704.slice/crio-9ed38c3810e9264dcd48a4a05895be421075c569f4425374476e596d0a4c97f4 WatchSource:0}: Error finding container 9ed38c3810e9264dcd48a4a05895be421075c569f4425374476e596d0a4c97f4: Status 404 returned error can't find the container with id 9ed38c3810e9264dcd48a4a05895be421075c569f4425374476e596d0a4c97f4 Nov 24 08:48:19 crc kubenswrapper[4718]: W1124 08:48:19.294377 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e87139f_12e2_45d7_8401_ae56813c9829.slice/crio-f52360d8c8efa032783fbe08db4376a803b1b3b0fb68aaccc0cb675f8b325ef8 WatchSource:0}: Error finding container f52360d8c8efa032783fbe08db4376a803b1b3b0fb68aaccc0cb675f8b325ef8: Status 404 returned error can't find the container with id f52360d8c8efa032783fbe08db4376a803b1b3b0fb68aaccc0cb675f8b325ef8 Nov 24 08:48:19 crc kubenswrapper[4718]: I1124 08:48:19.297765 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-0"] Nov 24 08:48:19 crc kubenswrapper[4718]: W1124 08:48:19.303248 4718 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode129bbf2_f4d7_42c1_84b3_80338a6fafd4.slice/crio-263a86e026e34de261d5292b076289472ee95e908e11ee30597a7b2af695ed3d WatchSource:0}: Error finding container 263a86e026e34de261d5292b076289472ee95e908e11ee30597a7b2af695ed3d: Status 404 returned error can't find the container with id 263a86e026e34de261d5292b076289472ee95e908e11ee30597a7b2af695ed3d Nov 24 08:48:19 crc kubenswrapper[4718]: I1124 08:48:19.424085 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-1" event={"ID":"3e87139f-12e2-45d7-8401-ae56813c9829","Type":"ContainerStarted","Data":"f52360d8c8efa032783fbe08db4376a803b1b3b0fb68aaccc0cb675f8b325ef8"} Nov 24 08:48:19 crc kubenswrapper[4718]: I1124 08:48:19.425253 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-0" event={"ID":"e129bbf2-f4d7-42c1-84b3-80338a6fafd4","Type":"ContainerStarted","Data":"263a86e026e34de261d5292b076289472ee95e908e11ee30597a7b2af695ed3d"} Nov 24 08:48:19 crc kubenswrapper[4718]: I1124 08:48:19.426025 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-2" event={"ID":"2774aeea-619b-4e69-9927-95b17dcc9704","Type":"ContainerStarted","Data":"9ed38c3810e9264dcd48a4a05895be421075c569f4425374476e596d0a4c97f4"} Nov 24 08:48:22 crc kubenswrapper[4718]: I1124 08:48:22.044927 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:48:22 crc kubenswrapper[4718]: I1124 08:48:22.045505 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:48:22 crc kubenswrapper[4718]: I1124 08:48:22.045576 4718 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:48:22 crc kubenswrapper[4718]: I1124 08:48:22.047363 4718 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"617141ef091b02db6eb1b54328e03850e47f6f2d095a10a3726c1cd67c78f520"} pod="openshift-machine-config-operator/machine-config-daemon-575gl" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 08:48:22 crc kubenswrapper[4718]: I1124 08:48:22.047491 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" containerID="cri-o://617141ef091b02db6eb1b54328e03850e47f6f2d095a10a3726c1cd67c78f520" gracePeriod=600 Nov 24 08:48:22 crc kubenswrapper[4718]: I1124 08:48:22.448064 4718 generic.go:334] "Generic (PLEG): container finished" podID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerID="617141ef091b02db6eb1b54328e03850e47f6f2d095a10a3726c1cd67c78f520" exitCode=0 Nov 24 08:48:22 crc kubenswrapper[4718]: I1124 08:48:22.448106 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerDied","Data":"617141ef091b02db6eb1b54328e03850e47f6f2d095a10a3726c1cd67c78f520"} Nov 24 08:48:22 crc kubenswrapper[4718]: I1124 08:48:22.448180 4718 scope.go:117] "RemoveContainer" containerID="873c6f9762288e9dfb0f0664bf7a56f9f72a8fa6abf831277ce0db85d93a114f" Nov 24 08:48:24 crc kubenswrapper[4718]: I1124 08:48:24.760574 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-5f64d8d556-c775h" Nov 24 08:48:27 crc kubenswrapper[4718]: I1124 08:48:27.490491 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-2" event={"ID":"2774aeea-619b-4e69-9927-95b17dcc9704","Type":"ContainerStarted","Data":"374292e29e08c610aa198a7296489cfb2653461896570ea733579da4a097917a"} Nov 24 08:48:27 crc kubenswrapper[4718]: I1124 08:48:27.493415 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerStarted","Data":"1a5f70c58a45eccf71c6de8475549daad92f17e19b44d32bf6a0b7edbca6ed9f"} Nov 24 08:48:27 crc kubenswrapper[4718]: I1124 08:48:27.494744 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-1" event={"ID":"3e87139f-12e2-45d7-8401-ae56813c9829","Type":"ContainerStarted","Data":"111ecb489b1bce0270c3dd2a41e038ef283d9efbb5b646cad2436698c9acf49f"} Nov 24 08:48:27 crc kubenswrapper[4718]: I1124 08:48:27.496674 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-0" event={"ID":"e129bbf2-f4d7-42c1-84b3-80338a6fafd4","Type":"ContainerStarted","Data":"e3a69bdaedbe4f3833172d98029c141ad9108f5fdb5f65accc197e99531cb21e"} Nov 24 08:48:29 crc kubenswrapper[4718]: I1124 08:48:29.927681 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/memcached-0"] Nov 24 08:48:29 crc kubenswrapper[4718]: I1124 08:48:29.929056 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/memcached-0" Nov 24 08:48:29 crc kubenswrapper[4718]: I1124 08:48:29.930756 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"memcached-config-data" Nov 24 08:48:29 crc kubenswrapper[4718]: I1124 08:48:29.931119 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"memcached-memcached-dockercfg-k9sjw" Nov 24 08:48:29 crc kubenswrapper[4718]: I1124 08:48:29.951662 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/memcached-0"] Nov 24 08:48:30 crc kubenswrapper[4718]: I1124 08:48:30.025417 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nr5wr\" (UniqueName: \"kubernetes.io/projected/b90b6ef5-488d-4524-9c45-ac92728bfb71-kube-api-access-nr5wr\") pod \"memcached-0\" (UID: \"b90b6ef5-488d-4524-9c45-ac92728bfb71\") " pod="glance-kuttl-tests/memcached-0" Nov 24 08:48:30 crc kubenswrapper[4718]: I1124 08:48:30.025495 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b90b6ef5-488d-4524-9c45-ac92728bfb71-config-data\") pod \"memcached-0\" (UID: \"b90b6ef5-488d-4524-9c45-ac92728bfb71\") " pod="glance-kuttl-tests/memcached-0" Nov 24 08:48:30 crc kubenswrapper[4718]: I1124 08:48:30.025523 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b90b6ef5-488d-4524-9c45-ac92728bfb71-kolla-config\") pod \"memcached-0\" (UID: \"b90b6ef5-488d-4524-9c45-ac92728bfb71\") " pod="glance-kuttl-tests/memcached-0" Nov 24 08:48:30 crc kubenswrapper[4718]: I1124 08:48:30.126628 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b90b6ef5-488d-4524-9c45-ac92728bfb71-config-data\") pod \"memcached-0\" (UID: \"b90b6ef5-488d-4524-9c45-ac92728bfb71\") " pod="glance-kuttl-tests/memcached-0" Nov 24 08:48:30 crc kubenswrapper[4718]: I1124 08:48:30.126696 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b90b6ef5-488d-4524-9c45-ac92728bfb71-kolla-config\") pod \"memcached-0\" (UID: \"b90b6ef5-488d-4524-9c45-ac92728bfb71\") " pod="glance-kuttl-tests/memcached-0" Nov 24 08:48:30 crc kubenswrapper[4718]: I1124 08:48:30.126774 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nr5wr\" (UniqueName: \"kubernetes.io/projected/b90b6ef5-488d-4524-9c45-ac92728bfb71-kube-api-access-nr5wr\") pod \"memcached-0\" (UID: \"b90b6ef5-488d-4524-9c45-ac92728bfb71\") " pod="glance-kuttl-tests/memcached-0" Nov 24 08:48:30 crc kubenswrapper[4718]: I1124 08:48:30.127624 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b90b6ef5-488d-4524-9c45-ac92728bfb71-kolla-config\") pod \"memcached-0\" (UID: \"b90b6ef5-488d-4524-9c45-ac92728bfb71\") " pod="glance-kuttl-tests/memcached-0" Nov 24 08:48:30 crc kubenswrapper[4718]: I1124 08:48:30.127771 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b90b6ef5-488d-4524-9c45-ac92728bfb71-config-data\") pod \"memcached-0\" (UID: \"b90b6ef5-488d-4524-9c45-ac92728bfb71\") " pod="glance-kuttl-tests/memcached-0" Nov 24 08:48:30 
crc kubenswrapper[4718]: I1124 08:48:30.165108 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nr5wr\" (UniqueName: \"kubernetes.io/projected/b90b6ef5-488d-4524-9c45-ac92728bfb71-kube-api-access-nr5wr\") pod \"memcached-0\" (UID: \"b90b6ef5-488d-4524-9c45-ac92728bfb71\") " pod="glance-kuttl-tests/memcached-0" Nov 24 08:48:30 crc kubenswrapper[4718]: I1124 08:48:30.245773 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/memcached-0" Nov 24 08:48:30 crc kubenswrapper[4718]: I1124 08:48:30.793787 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/memcached-0"] Nov 24 08:48:31 crc kubenswrapper[4718]: I1124 08:48:31.529143 4718 generic.go:334] "Generic (PLEG): container finished" podID="e129bbf2-f4d7-42c1-84b3-80338a6fafd4" containerID="e3a69bdaedbe4f3833172d98029c141ad9108f5fdb5f65accc197e99531cb21e" exitCode=0 Nov 24 08:48:31 crc kubenswrapper[4718]: I1124 08:48:31.529254 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-0" event={"ID":"e129bbf2-f4d7-42c1-84b3-80338a6fafd4","Type":"ContainerDied","Data":"e3a69bdaedbe4f3833172d98029c141ad9108f5fdb5f65accc197e99531cb21e"} Nov 24 08:48:31 crc kubenswrapper[4718]: I1124 08:48:31.531925 4718 generic.go:334] "Generic (PLEG): container finished" podID="2774aeea-619b-4e69-9927-95b17dcc9704" containerID="374292e29e08c610aa198a7296489cfb2653461896570ea733579da4a097917a" exitCode=0 Nov 24 08:48:31 crc kubenswrapper[4718]: I1124 08:48:31.531990 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-2" event={"ID":"2774aeea-619b-4e69-9927-95b17dcc9704","Type":"ContainerDied","Data":"374292e29e08c610aa198a7296489cfb2653461896570ea733579da4a097917a"} Nov 24 08:48:31 crc kubenswrapper[4718]: I1124 08:48:31.533845 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/memcached-0" event={"ID":"b90b6ef5-488d-4524-9c45-ac92728bfb71","Type":"ContainerStarted","Data":"eddcfb5667045bd1b220f84bfea0efc1689ce29add2aa13ba3d9d8f045da16dc"} Nov 24 08:48:31 crc kubenswrapper[4718]: I1124 08:48:31.544061 4718 generic.go:334] "Generic (PLEG): container finished" podID="3e87139f-12e2-45d7-8401-ae56813c9829" containerID="111ecb489b1bce0270c3dd2a41e038ef283d9efbb5b646cad2436698c9acf49f" exitCode=0 Nov 24 08:48:31 crc kubenswrapper[4718]: I1124 08:48:31.544105 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-1" event={"ID":"3e87139f-12e2-45d7-8401-ae56813c9829","Type":"ContainerDied","Data":"111ecb489b1bce0270c3dd2a41e038ef283d9efbb5b646cad2436698c9acf49f"} Nov 24 08:48:32 crc kubenswrapper[4718]: I1124 08:48:32.552214 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-0" event={"ID":"e129bbf2-f4d7-42c1-84b3-80338a6fafd4","Type":"ContainerStarted","Data":"3e914dd1d04b95c2c5080ea9ccd80551e05e9cae8037e97c27414e9b48bc6127"} Nov 24 08:48:32 crc kubenswrapper[4718]: I1124 08:48:32.555635 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-2" event={"ID":"2774aeea-619b-4e69-9927-95b17dcc9704","Type":"ContainerStarted","Data":"d7596735ba23d16ebd6f11d520a1d31f418e78750b7e074bfcfd2edefda03ca5"} Nov 24 08:48:32 crc kubenswrapper[4718]: I1124 08:48:32.557719 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-1" 
event={"ID":"3e87139f-12e2-45d7-8401-ae56813c9829","Type":"ContainerStarted","Data":"351a82cf54c1c926d34d11e232e4b182c66805160cb4b8d73c5f5380a6df5f5a"} Nov 24 08:48:32 crc kubenswrapper[4718]: I1124 08:48:32.573241 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/openstack-galera-0" podStartSLOduration=8.039378974 podStartE2EDuration="15.573213376s" podCreationTimestamp="2025-11-24 08:48:17 +0000 UTC" firstStartedPulling="2025-11-24 08:48:19.305698521 +0000 UTC m=+771.421989425" lastFinishedPulling="2025-11-24 08:48:26.839532923 +0000 UTC m=+778.955823827" observedRunningTime="2025-11-24 08:48:32.568390427 +0000 UTC m=+784.684681331" watchObservedRunningTime="2025-11-24 08:48:32.573213376 +0000 UTC m=+784.689504290" Nov 24 08:48:32 crc kubenswrapper[4718]: I1124 08:48:32.591451 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/openstack-galera-1" podStartSLOduration=8.050823048 podStartE2EDuration="15.591428538s" podCreationTimestamp="2025-11-24 08:48:17 +0000 UTC" firstStartedPulling="2025-11-24 08:48:19.297417556 +0000 UTC m=+771.413708460" lastFinishedPulling="2025-11-24 08:48:26.838023026 +0000 UTC m=+778.954313950" observedRunningTime="2025-11-24 08:48:32.586814104 +0000 UTC m=+784.703105008" watchObservedRunningTime="2025-11-24 08:48:32.591428538 +0000 UTC m=+784.707719442" Nov 24 08:48:32 crc kubenswrapper[4718]: I1124 08:48:32.609276 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/openstack-galera-2" podStartSLOduration=8.099201057 podStartE2EDuration="15.609260781s" podCreationTimestamp="2025-11-24 08:48:17 +0000 UTC" firstStartedPulling="2025-11-24 08:48:19.292182726 +0000 UTC m=+771.408473630" lastFinishedPulling="2025-11-24 08:48:26.80224245 +0000 UTC m=+778.918533354" observedRunningTime="2025-11-24 08:48:32.606202842 +0000 UTC m=+784.722493746" watchObservedRunningTime="2025-11-24 08:48:32.609260781 +0000 UTC m=+784.725551685" Nov 24 08:48:32 crc kubenswrapper[4718]: I1124 08:48:32.685306 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-n6rwt"] Nov 24 08:48:32 crc kubenswrapper[4718]: I1124 08:48:32.686117 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-n6rwt" Nov 24 08:48:32 crc kubenswrapper[4718]: I1124 08:48:32.688361 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-index-dockercfg-84b5l" Nov 24 08:48:32 crc kubenswrapper[4718]: I1124 08:48:32.695496 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-n6rwt"] Nov 24 08:48:32 crc kubenswrapper[4718]: I1124 08:48:32.764170 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hxvm\" (UniqueName: \"kubernetes.io/projected/1b303474-4101-4aeb-b45d-5fd4c668b75c-kube-api-access-2hxvm\") pod \"rabbitmq-cluster-operator-index-n6rwt\" (UID: \"1b303474-4101-4aeb-b45d-5fd4c668b75c\") " pod="openstack-operators/rabbitmq-cluster-operator-index-n6rwt" Nov 24 08:48:32 crc kubenswrapper[4718]: I1124 08:48:32.865758 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hxvm\" (UniqueName: \"kubernetes.io/projected/1b303474-4101-4aeb-b45d-5fd4c668b75c-kube-api-access-2hxvm\") pod \"rabbitmq-cluster-operator-index-n6rwt\" (UID: \"1b303474-4101-4aeb-b45d-5fd4c668b75c\") " pod="openstack-operators/rabbitmq-cluster-operator-index-n6rwt" Nov 24 08:48:32 crc kubenswrapper[4718]: I1124 08:48:32.896372 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hxvm\" (UniqueName: \"kubernetes.io/projected/1b303474-4101-4aeb-b45d-5fd4c668b75c-kube-api-access-2hxvm\") pod \"rabbitmq-cluster-operator-index-n6rwt\" (UID: \"1b303474-4101-4aeb-b45d-5fd4c668b75c\") " pod="openstack-operators/rabbitmq-cluster-operator-index-n6rwt" Nov 24 08:48:33 crc kubenswrapper[4718]: I1124 08:48:33.010385 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-n6rwt" Nov 24 08:48:33 crc kubenswrapper[4718]: I1124 08:48:33.420834 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-n6rwt"] Nov 24 08:48:35 crc kubenswrapper[4718]: W1124 08:48:35.771159 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b303474_4101_4aeb_b45d_5fd4c668b75c.slice/crio-9d7f2d40ebe498f20ca6fa950ca8782ee2b730aac51439452be9b7b1300a1ce4 WatchSource:0}: Error finding container 9d7f2d40ebe498f20ca6fa950ca8782ee2b730aac51439452be9b7b1300a1ce4: Status 404 returned error can't find the container with id 9d7f2d40ebe498f20ca6fa950ca8782ee2b730aac51439452be9b7b1300a1ce4 Nov 24 08:48:36 crc kubenswrapper[4718]: I1124 08:48:36.586294 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-n6rwt" event={"ID":"1b303474-4101-4aeb-b45d-5fd4c668b75c","Type":"ContainerStarted","Data":"9d7f2d40ebe498f20ca6fa950ca8782ee2b730aac51439452be9b7b1300a1ce4"} Nov 24 08:48:37 crc kubenswrapper[4718]: I1124 08:48:37.593118 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/memcached-0" event={"ID":"b90b6ef5-488d-4524-9c45-ac92728bfb71","Type":"ContainerStarted","Data":"7e7b6d19cc45899bd3ad848fc21469ddc838bc5ee0e4f616b4a187902da25461"} Nov 24 08:48:37 crc kubenswrapper[4718]: I1124 08:48:37.593452 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/memcached-0" Nov 24 08:48:37 crc kubenswrapper[4718]: I1124 08:48:37.609591 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/memcached-0" podStartSLOduration=2.86520882 podStartE2EDuration="8.609574962s" podCreationTimestamp="2025-11-24 08:48:29 +0000 UTC" firstStartedPulling="2025-11-24 08:48:30.833663555 +0000 UTC m=+782.949954459" lastFinishedPulling="2025-11-24 08:48:36.578029697 +0000 UTC m=+788.694320601" observedRunningTime="2025-11-24 08:48:37.607014974 +0000 UTC m=+789.723305898" watchObservedRunningTime="2025-11-24 08:48:37.609574962 +0000 UTC m=+789.725865866" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.079803 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-n6rwt"] Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.288781 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-s4njf"] Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.294253 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s4njf" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.294843 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s4njf"] Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.449621 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zktd6\" (UniqueName: \"kubernetes.io/projected/b62f4d7b-76b5-402e-bb8b-b958d159f41a-kube-api-access-zktd6\") pod \"redhat-marketplace-s4njf\" (UID: \"b62f4d7b-76b5-402e-bb8b-b958d159f41a\") " pod="openshift-marketplace/redhat-marketplace-s4njf" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.449678 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b62f4d7b-76b5-402e-bb8b-b958d159f41a-utilities\") pod \"redhat-marketplace-s4njf\" (UID: \"b62f4d7b-76b5-402e-bb8b-b958d159f41a\") " pod="openshift-marketplace/redhat-marketplace-s4njf" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.449702 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b62f4d7b-76b5-402e-bb8b-b958d159f41a-catalog-content\") pod \"redhat-marketplace-s4njf\" (UID: \"b62f4d7b-76b5-402e-bb8b-b958d159f41a\") " pod="openshift-marketplace/redhat-marketplace-s4njf" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.551051 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zktd6\" (UniqueName: \"kubernetes.io/projected/b62f4d7b-76b5-402e-bb8b-b958d159f41a-kube-api-access-zktd6\") pod \"redhat-marketplace-s4njf\" (UID: \"b62f4d7b-76b5-402e-bb8b-b958d159f41a\") " pod="openshift-marketplace/redhat-marketplace-s4njf" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.551329 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b62f4d7b-76b5-402e-bb8b-b958d159f41a-utilities\") pod \"redhat-marketplace-s4njf\" (UID: \"b62f4d7b-76b5-402e-bb8b-b958d159f41a\") " pod="openshift-marketplace/redhat-marketplace-s4njf" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.551495 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b62f4d7b-76b5-402e-bb8b-b958d159f41a-catalog-content\") pod \"redhat-marketplace-s4njf\" (UID: \"b62f4d7b-76b5-402e-bb8b-b958d159f41a\") " pod="openshift-marketplace/redhat-marketplace-s4njf" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.551868 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b62f4d7b-76b5-402e-bb8b-b958d159f41a-utilities\") pod \"redhat-marketplace-s4njf\" (UID: \"b62f4d7b-76b5-402e-bb8b-b958d159f41a\") " pod="openshift-marketplace/redhat-marketplace-s4njf" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.551963 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b62f4d7b-76b5-402e-bb8b-b958d159f41a-catalog-content\") pod \"redhat-marketplace-s4njf\" (UID: \"b62f4d7b-76b5-402e-bb8b-b958d159f41a\") " pod="openshift-marketplace/redhat-marketplace-s4njf" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.572865 4718 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zktd6\" (UniqueName: \"kubernetes.io/projected/b62f4d7b-76b5-402e-bb8b-b958d159f41a-kube-api-access-zktd6\") pod \"redhat-marketplace-s4njf\" (UID: \"b62f4d7b-76b5-402e-bb8b-b958d159f41a\") " pod="openshift-marketplace/redhat-marketplace-s4njf" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.670807 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s4njf" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.827826 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.827891 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.845549 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.845597 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.853725 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.853780 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.887419 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-vv2ld"] Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.888319 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-vv2ld" Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.901469 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-vv2ld"] Nov 24 08:48:38 crc kubenswrapper[4718]: I1124 08:48:38.948284 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:39 crc kubenswrapper[4718]: I1124 08:48:39.059143 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrd2z\" (UniqueName: \"kubernetes.io/projected/f6f10607-65c4-4502-8aac-b9f26461a142-kube-api-access-zrd2z\") pod \"rabbitmq-cluster-operator-index-vv2ld\" (UID: \"f6f10607-65c4-4502-8aac-b9f26461a142\") " pod="openstack-operators/rabbitmq-cluster-operator-index-vv2ld" Nov 24 08:48:39 crc kubenswrapper[4718]: I1124 08:48:39.160326 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrd2z\" (UniqueName: \"kubernetes.io/projected/f6f10607-65c4-4502-8aac-b9f26461a142-kube-api-access-zrd2z\") pod \"rabbitmq-cluster-operator-index-vv2ld\" (UID: \"f6f10607-65c4-4502-8aac-b9f26461a142\") " pod="openstack-operators/rabbitmq-cluster-operator-index-vv2ld" Nov 24 08:48:39 crc kubenswrapper[4718]: I1124 08:48:39.178062 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrd2z\" (UniqueName: \"kubernetes.io/projected/f6f10607-65c4-4502-8aac-b9f26461a142-kube-api-access-zrd2z\") pod \"rabbitmq-cluster-operator-index-vv2ld\" (UID: \"f6f10607-65c4-4502-8aac-b9f26461a142\") " pod="openstack-operators/rabbitmq-cluster-operator-index-vv2ld" Nov 24 08:48:39 crc kubenswrapper[4718]: I1124 08:48:39.215813 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-vv2ld" Nov 24 08:48:39 crc kubenswrapper[4718]: I1124 08:48:39.593568 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s4njf"] Nov 24 08:48:39 crc kubenswrapper[4718]: W1124 08:48:39.601928 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb62f4d7b_76b5_402e_bb8b_b958d159f41a.slice/crio-ea10faf6d51f81f16a20037b74af0ba4c1f02e010dbb22d25440907ea04bde73 WatchSource:0}: Error finding container ea10faf6d51f81f16a20037b74af0ba4c1f02e010dbb22d25440907ea04bde73: Status 404 returned error can't find the container with id ea10faf6d51f81f16a20037b74af0ba4c1f02e010dbb22d25440907ea04bde73 Nov 24 08:48:39 crc kubenswrapper[4718]: I1124 08:48:39.607048 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-n6rwt" event={"ID":"1b303474-4101-4aeb-b45d-5fd4c668b75c","Type":"ContainerStarted","Data":"beabf06be5f25ed895cda65c554256d7054a195e7dbc55f1f72accd95f3adfb1"} Nov 24 08:48:39 crc kubenswrapper[4718]: I1124 08:48:39.607116 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-n6rwt" podUID="1b303474-4101-4aeb-b45d-5fd4c668b75c" containerName="registry-server" containerID="cri-o://beabf06be5f25ed895cda65c554256d7054a195e7dbc55f1f72accd95f3adfb1" gracePeriod=2 Nov 24 08:48:39 crc kubenswrapper[4718]: I1124 08:48:39.609218 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s4njf" event={"ID":"b62f4d7b-76b5-402e-bb8b-b958d159f41a","Type":"ContainerStarted","Data":"ea10faf6d51f81f16a20037b74af0ba4c1f02e010dbb22d25440907ea04bde73"} Nov 24 08:48:39 crc kubenswrapper[4718]: I1124 08:48:39.634354 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-n6rwt" podStartSLOduration=4.194147968 podStartE2EDuration="7.634332652s" podCreationTimestamp="2025-11-24 08:48:32 +0000 UTC" firstStartedPulling="2025-11-24 08:48:35.774713454 +0000 UTC m=+787.891004358" lastFinishedPulling="2025-11-24 08:48:39.214898128 +0000 UTC m=+791.331189042" observedRunningTime="2025-11-24 08:48:39.623598249 +0000 UTC m=+791.739889143" watchObservedRunningTime="2025-11-24 08:48:39.634332652 +0000 UTC m=+791.750623556" Nov 24 08:48:39 crc kubenswrapper[4718]: I1124 08:48:39.684489 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/openstack-galera-2" Nov 24 08:48:39 crc kubenswrapper[4718]: I1124 08:48:39.690494 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-vv2ld"] Nov 24 08:48:39 crc kubenswrapper[4718]: E1124 08:48:39.970551 4718 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.220:47222->38.102.83.220:33633: write tcp 192.168.126.11:10250->192.168.126.11:48768: write: broken pipe Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.404660 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-n6rwt" Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.476730 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hxvm\" (UniqueName: \"kubernetes.io/projected/1b303474-4101-4aeb-b45d-5fd4c668b75c-kube-api-access-2hxvm\") pod \"1b303474-4101-4aeb-b45d-5fd4c668b75c\" (UID: \"1b303474-4101-4aeb-b45d-5fd4c668b75c\") " Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.507216 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b303474-4101-4aeb-b45d-5fd4c668b75c-kube-api-access-2hxvm" (OuterVolumeSpecName: "kube-api-access-2hxvm") pod "1b303474-4101-4aeb-b45d-5fd4c668b75c" (UID: "1b303474-4101-4aeb-b45d-5fd4c668b75c"). InnerVolumeSpecName "kube-api-access-2hxvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.577912 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hxvm\" (UniqueName: \"kubernetes.io/projected/1b303474-4101-4aeb-b45d-5fd4c668b75c-kube-api-access-2hxvm\") on node \"crc\" DevicePath \"\"" Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.616171 4718 generic.go:334] "Generic (PLEG): container finished" podID="b62f4d7b-76b5-402e-bb8b-b958d159f41a" containerID="0fe24d643c4c56ec175aeeec3e147993c7c8efacd7063e41e2fb11cede1d1bb4" exitCode=0 Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.616603 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s4njf" event={"ID":"b62f4d7b-76b5-402e-bb8b-b958d159f41a","Type":"ContainerDied","Data":"0fe24d643c4c56ec175aeeec3e147993c7c8efacd7063e41e2fb11cede1d1bb4"} Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.618624 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-vv2ld" event={"ID":"f6f10607-65c4-4502-8aac-b9f26461a142","Type":"ContainerStarted","Data":"2ae103ba8ee7862f87432a0d2650263c4a1b57af4fbf1adcc75a8e82cc2cbc9b"} Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.618754 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-vv2ld" event={"ID":"f6f10607-65c4-4502-8aac-b9f26461a142","Type":"ContainerStarted","Data":"f30b86dc5114b044fcfe4ffc6705e2c56f1d2e6a48247168465d2729724fc83a"} Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.628809 4718 generic.go:334] "Generic (PLEG): container finished" podID="1b303474-4101-4aeb-b45d-5fd4c668b75c" containerID="beabf06be5f25ed895cda65c554256d7054a195e7dbc55f1f72accd95f3adfb1" exitCode=0 Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.628930 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-n6rwt" Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.630820 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-n6rwt" event={"ID":"1b303474-4101-4aeb-b45d-5fd4c668b75c","Type":"ContainerDied","Data":"beabf06be5f25ed895cda65c554256d7054a195e7dbc55f1f72accd95f3adfb1"} Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.630874 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-n6rwt" event={"ID":"1b303474-4101-4aeb-b45d-5fd4c668b75c","Type":"ContainerDied","Data":"9d7f2d40ebe498f20ca6fa950ca8782ee2b730aac51439452be9b7b1300a1ce4"} Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.630905 4718 scope.go:117] "RemoveContainer" containerID="beabf06be5f25ed895cda65c554256d7054a195e7dbc55f1f72accd95f3adfb1" Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.656404 4718 scope.go:117] "RemoveContainer" containerID="beabf06be5f25ed895cda65c554256d7054a195e7dbc55f1f72accd95f3adfb1" Nov 24 08:48:40 crc kubenswrapper[4718]: E1124 08:48:40.656988 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"beabf06be5f25ed895cda65c554256d7054a195e7dbc55f1f72accd95f3adfb1\": container with ID starting with beabf06be5f25ed895cda65c554256d7054a195e7dbc55f1f72accd95f3adfb1 not found: ID does not exist" containerID="beabf06be5f25ed895cda65c554256d7054a195e7dbc55f1f72accd95f3adfb1" Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.657037 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"beabf06be5f25ed895cda65c554256d7054a195e7dbc55f1f72accd95f3adfb1"} err="failed to get container status \"beabf06be5f25ed895cda65c554256d7054a195e7dbc55f1f72accd95f3adfb1\": rpc error: code = NotFound desc = could not find container \"beabf06be5f25ed895cda65c554256d7054a195e7dbc55f1f72accd95f3adfb1\": container with ID starting with beabf06be5f25ed895cda65c554256d7054a195e7dbc55f1f72accd95f3adfb1 not found: ID does not exist" Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.661703 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-vv2ld" podStartSLOduration=2.000119023 podStartE2EDuration="2.661670531s" podCreationTimestamp="2025-11-24 08:48:38 +0000 UTC" firstStartedPulling="2025-11-24 08:48:39.714119256 +0000 UTC m=+791.830410160" lastFinishedPulling="2025-11-24 08:48:40.375670764 +0000 UTC m=+792.491961668" observedRunningTime="2025-11-24 08:48:40.661358024 +0000 UTC m=+792.777648928" watchObservedRunningTime="2025-11-24 08:48:40.661670531 +0000 UTC m=+792.777961435" Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.678437 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-n6rwt"] Nov 24 08:48:40 crc kubenswrapper[4718]: I1124 08:48:40.684092 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-n6rwt"] Nov 24 08:48:41 crc kubenswrapper[4718]: I1124 08:48:41.637369 4718 generic.go:334] "Generic (PLEG): container finished" podID="b62f4d7b-76b5-402e-bb8b-b958d159f41a" containerID="e992af35d8c6bcbdc46d4551b3585bc7784221b845a02f9f9770cc8214026e2b" exitCode=0 Nov 24 08:48:41 crc kubenswrapper[4718]: I1124 08:48:41.637412 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-s4njf" event={"ID":"b62f4d7b-76b5-402e-bb8b-b958d159f41a","Type":"ContainerDied","Data":"e992af35d8c6bcbdc46d4551b3585bc7784221b845a02f9f9770cc8214026e2b"} Nov 24 08:48:42 crc kubenswrapper[4718]: I1124 08:48:42.605024 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b303474-4101-4aeb-b45d-5fd4c668b75c" path="/var/lib/kubelet/pods/1b303474-4101-4aeb-b45d-5fd4c668b75c/volumes" Nov 24 08:48:44 crc kubenswrapper[4718]: I1124 08:48:44.656338 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s4njf" event={"ID":"b62f4d7b-76b5-402e-bb8b-b958d159f41a","Type":"ContainerStarted","Data":"19bbb6f27a049fc05986d2920a2f7846242b1fa1893369c208b25973b6cbc23e"} Nov 24 08:48:44 crc kubenswrapper[4718]: I1124 08:48:44.674247 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-s4njf" podStartSLOduration=3.211453022 podStartE2EDuration="6.674228977s" podCreationTimestamp="2025-11-24 08:48:38 +0000 UTC" firstStartedPulling="2025-11-24 08:48:40.618653458 +0000 UTC m=+792.734944362" lastFinishedPulling="2025-11-24 08:48:44.081429413 +0000 UTC m=+796.197720317" observedRunningTime="2025-11-24 08:48:44.67172177 +0000 UTC m=+796.788012704" watchObservedRunningTime="2025-11-24 08:48:44.674228977 +0000 UTC m=+796.790519891" Nov 24 08:48:45 crc kubenswrapper[4718]: I1124 08:48:45.247601 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/memcached-0" Nov 24 08:48:48 crc kubenswrapper[4718]: I1124 08:48:48.671759 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-s4njf" Nov 24 08:48:48 crc kubenswrapper[4718]: I1124 08:48:48.673405 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-s4njf" Nov 24 08:48:48 crc kubenswrapper[4718]: I1124 08:48:48.709053 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-s4njf" Nov 24 08:48:49 crc kubenswrapper[4718]: I1124 08:48:49.216387 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/rabbitmq-cluster-operator-index-vv2ld" Nov 24 08:48:49 crc kubenswrapper[4718]: I1124 08:48:49.216447 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/rabbitmq-cluster-operator-index-vv2ld" Nov 24 08:48:49 crc kubenswrapper[4718]: I1124 08:48:49.243946 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/rabbitmq-cluster-operator-index-vv2ld" Nov 24 08:48:49 crc kubenswrapper[4718]: I1124 08:48:49.256857 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:49 crc kubenswrapper[4718]: I1124 08:48:49.329956 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/openstack-galera-1" Nov 24 08:48:49 crc kubenswrapper[4718]: I1124 08:48:49.709036 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/rabbitmq-cluster-operator-index-vv2ld" Nov 24 08:48:49 crc kubenswrapper[4718]: I1124 08:48:49.733393 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-s4njf" Nov 24 08:48:52 crc kubenswrapper[4718]: I1124 
08:48:52.361174 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:52 crc kubenswrapper[4718]: I1124 08:48:52.422386 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/openstack-galera-0" Nov 24 08:48:53 crc kubenswrapper[4718]: I1124 08:48:53.476644 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s4njf"] Nov 24 08:48:53 crc kubenswrapper[4718]: I1124 08:48:53.476873 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-s4njf" podUID="b62f4d7b-76b5-402e-bb8b-b958d159f41a" containerName="registry-server" containerID="cri-o://19bbb6f27a049fc05986d2920a2f7846242b1fa1893369c208b25973b6cbc23e" gracePeriod=2 Nov 24 08:48:53 crc kubenswrapper[4718]: I1124 08:48:53.709860 4718 generic.go:334] "Generic (PLEG): container finished" podID="b62f4d7b-76b5-402e-bb8b-b958d159f41a" containerID="19bbb6f27a049fc05986d2920a2f7846242b1fa1893369c208b25973b6cbc23e" exitCode=0 Nov 24 08:48:53 crc kubenswrapper[4718]: I1124 08:48:53.710009 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s4njf" event={"ID":"b62f4d7b-76b5-402e-bb8b-b958d159f41a","Type":"ContainerDied","Data":"19bbb6f27a049fc05986d2920a2f7846242b1fa1893369c208b25973b6cbc23e"} Nov 24 08:48:53 crc kubenswrapper[4718]: I1124 08:48:53.870574 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s4njf" Nov 24 08:48:53 crc kubenswrapper[4718]: I1124 08:48:53.954533 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b62f4d7b-76b5-402e-bb8b-b958d159f41a-catalog-content\") pod \"b62f4d7b-76b5-402e-bb8b-b958d159f41a\" (UID: \"b62f4d7b-76b5-402e-bb8b-b958d159f41a\") " Nov 24 08:48:53 crc kubenswrapper[4718]: I1124 08:48:53.954620 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b62f4d7b-76b5-402e-bb8b-b958d159f41a-utilities\") pod \"b62f4d7b-76b5-402e-bb8b-b958d159f41a\" (UID: \"b62f4d7b-76b5-402e-bb8b-b958d159f41a\") " Nov 24 08:48:53 crc kubenswrapper[4718]: I1124 08:48:53.954674 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zktd6\" (UniqueName: \"kubernetes.io/projected/b62f4d7b-76b5-402e-bb8b-b958d159f41a-kube-api-access-zktd6\") pod \"b62f4d7b-76b5-402e-bb8b-b958d159f41a\" (UID: \"b62f4d7b-76b5-402e-bb8b-b958d159f41a\") " Nov 24 08:48:53 crc kubenswrapper[4718]: I1124 08:48:53.955438 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b62f4d7b-76b5-402e-bb8b-b958d159f41a-utilities" (OuterVolumeSpecName: "utilities") pod "b62f4d7b-76b5-402e-bb8b-b958d159f41a" (UID: "b62f4d7b-76b5-402e-bb8b-b958d159f41a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:48:53 crc kubenswrapper[4718]: I1124 08:48:53.959408 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b62f4d7b-76b5-402e-bb8b-b958d159f41a-kube-api-access-zktd6" (OuterVolumeSpecName: "kube-api-access-zktd6") pod "b62f4d7b-76b5-402e-bb8b-b958d159f41a" (UID: "b62f4d7b-76b5-402e-bb8b-b958d159f41a"). InnerVolumeSpecName "kube-api-access-zktd6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:48:53 crc kubenswrapper[4718]: I1124 08:48:53.969992 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b62f4d7b-76b5-402e-bb8b-b958d159f41a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b62f4d7b-76b5-402e-bb8b-b958d159f41a" (UID: "b62f4d7b-76b5-402e-bb8b-b958d159f41a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.056517 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b62f4d7b-76b5-402e-bb8b-b958d159f41a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.056550 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b62f4d7b-76b5-402e-bb8b-b958d159f41a-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.056560 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zktd6\" (UniqueName: \"kubernetes.io/projected/b62f4d7b-76b5-402e-bb8b-b958d159f41a-kube-api-access-zktd6\") on node \"crc\" DevicePath \"\"" Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.716982 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s4njf" event={"ID":"b62f4d7b-76b5-402e-bb8b-b958d159f41a","Type":"ContainerDied","Data":"ea10faf6d51f81f16a20037b74af0ba4c1f02e010dbb22d25440907ea04bde73"} Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.717267 4718 scope.go:117] "RemoveContainer" containerID="19bbb6f27a049fc05986d2920a2f7846242b1fa1893369c208b25973b6cbc23e" Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.717079 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s4njf" Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.737561 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s4njf"] Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.742093 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-s4njf"] Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.744404 4718 scope.go:117] "RemoveContainer" containerID="e992af35d8c6bcbdc46d4551b3585bc7784221b845a02f9f9770cc8214026e2b" Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.760164 4718 scope.go:117] "RemoveContainer" containerID="0fe24d643c4c56ec175aeeec3e147993c7c8efacd7063e41e2fb11cede1d1bb4" Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.885038 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bnpnc"] Nov 24 08:48:54 crc kubenswrapper[4718]: E1124 08:48:54.885394 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b62f4d7b-76b5-402e-bb8b-b958d159f41a" containerName="extract-utilities" Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.885411 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="b62f4d7b-76b5-402e-bb8b-b958d159f41a" containerName="extract-utilities" Nov 24 08:48:54 crc kubenswrapper[4718]: E1124 08:48:54.885428 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b303474-4101-4aeb-b45d-5fd4c668b75c" containerName="registry-server" Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.885436 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b303474-4101-4aeb-b45d-5fd4c668b75c" containerName="registry-server" Nov 24 08:48:54 crc kubenswrapper[4718]: E1124 08:48:54.885448 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b62f4d7b-76b5-402e-bb8b-b958d159f41a" containerName="extract-content" Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.885457 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="b62f4d7b-76b5-402e-bb8b-b958d159f41a" containerName="extract-content" Nov 24 08:48:54 crc kubenswrapper[4718]: E1124 08:48:54.885468 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b62f4d7b-76b5-402e-bb8b-b958d159f41a" containerName="registry-server" Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.885476 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="b62f4d7b-76b5-402e-bb8b-b958d159f41a" containerName="registry-server" Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.885608 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="b62f4d7b-76b5-402e-bb8b-b958d159f41a" containerName="registry-server" Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.885637 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b303474-4101-4aeb-b45d-5fd4c668b75c" containerName="registry-server" Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.887538 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bnpnc" Nov 24 08:48:54 crc kubenswrapper[4718]: I1124 08:48:54.896265 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bnpnc"] Nov 24 08:48:55 crc kubenswrapper[4718]: I1124 08:48:55.067635 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b0a7b87-6605-48e4-aff1-8393159231ff-catalog-content\") pod \"community-operators-bnpnc\" (UID: \"9b0a7b87-6605-48e4-aff1-8393159231ff\") " pod="openshift-marketplace/community-operators-bnpnc" Nov 24 08:48:55 crc kubenswrapper[4718]: I1124 08:48:55.067834 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnvrq\" (UniqueName: \"kubernetes.io/projected/9b0a7b87-6605-48e4-aff1-8393159231ff-kube-api-access-xnvrq\") pod \"community-operators-bnpnc\" (UID: \"9b0a7b87-6605-48e4-aff1-8393159231ff\") " pod="openshift-marketplace/community-operators-bnpnc" Nov 24 08:48:55 crc kubenswrapper[4718]: I1124 08:48:55.067893 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b0a7b87-6605-48e4-aff1-8393159231ff-utilities\") pod \"community-operators-bnpnc\" (UID: \"9b0a7b87-6605-48e4-aff1-8393159231ff\") " pod="openshift-marketplace/community-operators-bnpnc" Nov 24 08:48:55 crc kubenswrapper[4718]: I1124 08:48:55.168873 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b0a7b87-6605-48e4-aff1-8393159231ff-utilities\") pod \"community-operators-bnpnc\" (UID: \"9b0a7b87-6605-48e4-aff1-8393159231ff\") " pod="openshift-marketplace/community-operators-bnpnc" Nov 24 08:48:55 crc kubenswrapper[4718]: I1124 08:48:55.168992 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b0a7b87-6605-48e4-aff1-8393159231ff-catalog-content\") pod \"community-operators-bnpnc\" (UID: \"9b0a7b87-6605-48e4-aff1-8393159231ff\") " pod="openshift-marketplace/community-operators-bnpnc" Nov 24 08:48:55 crc kubenswrapper[4718]: I1124 08:48:55.169015 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnvrq\" (UniqueName: \"kubernetes.io/projected/9b0a7b87-6605-48e4-aff1-8393159231ff-kube-api-access-xnvrq\") pod \"community-operators-bnpnc\" (UID: \"9b0a7b87-6605-48e4-aff1-8393159231ff\") " pod="openshift-marketplace/community-operators-bnpnc" Nov 24 08:48:55 crc kubenswrapper[4718]: I1124 08:48:55.169450 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b0a7b87-6605-48e4-aff1-8393159231ff-utilities\") pod \"community-operators-bnpnc\" (UID: \"9b0a7b87-6605-48e4-aff1-8393159231ff\") " pod="openshift-marketplace/community-operators-bnpnc" Nov 24 08:48:55 crc kubenswrapper[4718]: I1124 08:48:55.169475 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b0a7b87-6605-48e4-aff1-8393159231ff-catalog-content\") pod \"community-operators-bnpnc\" (UID: \"9b0a7b87-6605-48e4-aff1-8393159231ff\") " pod="openshift-marketplace/community-operators-bnpnc" Nov 24 08:48:55 crc kubenswrapper[4718]: I1124 08:48:55.190279 4718 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xnvrq\" (UniqueName: \"kubernetes.io/projected/9b0a7b87-6605-48e4-aff1-8393159231ff-kube-api-access-xnvrq\") pod \"community-operators-bnpnc\" (UID: \"9b0a7b87-6605-48e4-aff1-8393159231ff\") " pod="openshift-marketplace/community-operators-bnpnc" Nov 24 08:48:55 crc kubenswrapper[4718]: I1124 08:48:55.209902 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bnpnc" Nov 24 08:48:55 crc kubenswrapper[4718]: I1124 08:48:55.696594 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bnpnc"] Nov 24 08:48:55 crc kubenswrapper[4718]: I1124 08:48:55.727506 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bnpnc" event={"ID":"9b0a7b87-6605-48e4-aff1-8393159231ff","Type":"ContainerStarted","Data":"8f41a70277bc67ecb5545b831c14cadb510a5e8a773e81f6ea32096461b05fa9"} Nov 24 08:48:56 crc kubenswrapper[4718]: I1124 08:48:56.604298 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b62f4d7b-76b5-402e-bb8b-b958d159f41a" path="/var/lib/kubelet/pods/b62f4d7b-76b5-402e-bb8b-b958d159f41a/volumes" Nov 24 08:48:56 crc kubenswrapper[4718]: I1124 08:48:56.735825 4718 generic.go:334] "Generic (PLEG): container finished" podID="9b0a7b87-6605-48e4-aff1-8393159231ff" containerID="f96ed44d93c7607307f482bffc86f62f51ccd2cbb46fb3cc71032863d0102fce" exitCode=0 Nov 24 08:48:56 crc kubenswrapper[4718]: I1124 08:48:56.735874 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bnpnc" event={"ID":"9b0a7b87-6605-48e4-aff1-8393159231ff","Type":"ContainerDied","Data":"f96ed44d93c7607307f482bffc86f62f51ccd2cbb46fb3cc71032863d0102fce"} Nov 24 08:48:57 crc kubenswrapper[4718]: I1124 08:48:57.745448 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bnpnc" event={"ID":"9b0a7b87-6605-48e4-aff1-8393159231ff","Type":"ContainerStarted","Data":"39d3d3b56c6b2db2f4d0deb8e1fbc3c693b25320430fbd762c9c0d044bb34321"} Nov 24 08:48:58 crc kubenswrapper[4718]: I1124 08:48:58.752881 4718 generic.go:334] "Generic (PLEG): container finished" podID="9b0a7b87-6605-48e4-aff1-8393159231ff" containerID="39d3d3b56c6b2db2f4d0deb8e1fbc3c693b25320430fbd762c9c0d044bb34321" exitCode=0 Nov 24 08:48:58 crc kubenswrapper[4718]: I1124 08:48:58.752989 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bnpnc" event={"ID":"9b0a7b87-6605-48e4-aff1-8393159231ff","Type":"ContainerDied","Data":"39d3d3b56c6b2db2f4d0deb8e1fbc3c693b25320430fbd762c9c0d044bb34321"} Nov 24 08:48:59 crc kubenswrapper[4718]: I1124 08:48:59.761027 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bnpnc" event={"ID":"9b0a7b87-6605-48e4-aff1-8393159231ff","Type":"ContainerStarted","Data":"9f1b87e951d0566d5276682b0f880b53153b1bdd6cdbc7326c8bad01cf97fa7d"} Nov 24 08:48:59 crc kubenswrapper[4718]: I1124 08:48:59.782298 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bnpnc" podStartSLOduration=3.306444818 podStartE2EDuration="5.782277229s" podCreationTimestamp="2025-11-24 08:48:54 +0000 UTC" firstStartedPulling="2025-11-24 08:48:56.737500984 +0000 UTC m=+808.853791888" lastFinishedPulling="2025-11-24 08:48:59.213333405 +0000 UTC m=+811.329624299" observedRunningTime="2025-11-24 
08:48:59.777705966 +0000 UTC m=+811.893996870" watchObservedRunningTime="2025-11-24 08:48:59.782277229 +0000 UTC m=+811.898568133" Nov 24 08:49:02 crc kubenswrapper[4718]: I1124 08:49:02.482651 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nlclt"] Nov 24 08:49:02 crc kubenswrapper[4718]: I1124 08:49:02.484230 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nlclt" Nov 24 08:49:02 crc kubenswrapper[4718]: I1124 08:49:02.515361 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nlclt"] Nov 24 08:49:02 crc kubenswrapper[4718]: I1124 08:49:02.564747 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-catalog-content\") pod \"certified-operators-nlclt\" (UID: \"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c\") " pod="openshift-marketplace/certified-operators-nlclt" Nov 24 08:49:02 crc kubenswrapper[4718]: I1124 08:49:02.564802 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-utilities\") pod \"certified-operators-nlclt\" (UID: \"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c\") " pod="openshift-marketplace/certified-operators-nlclt" Nov 24 08:49:02 crc kubenswrapper[4718]: I1124 08:49:02.564848 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xl4p\" (UniqueName: \"kubernetes.io/projected/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-kube-api-access-4xl4p\") pod \"certified-operators-nlclt\" (UID: \"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c\") " pod="openshift-marketplace/certified-operators-nlclt" Nov 24 08:49:02 crc kubenswrapper[4718]: I1124 08:49:02.665759 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xl4p\" (UniqueName: \"kubernetes.io/projected/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-kube-api-access-4xl4p\") pod \"certified-operators-nlclt\" (UID: \"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c\") " pod="openshift-marketplace/certified-operators-nlclt" Nov 24 08:49:02 crc kubenswrapper[4718]: I1124 08:49:02.665840 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-catalog-content\") pod \"certified-operators-nlclt\" (UID: \"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c\") " pod="openshift-marketplace/certified-operators-nlclt" Nov 24 08:49:02 crc kubenswrapper[4718]: I1124 08:49:02.665878 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-utilities\") pod \"certified-operators-nlclt\" (UID: \"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c\") " pod="openshift-marketplace/certified-operators-nlclt" Nov 24 08:49:02 crc kubenswrapper[4718]: I1124 08:49:02.666463 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-catalog-content\") pod \"certified-operators-nlclt\" (UID: \"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c\") " pod="openshift-marketplace/certified-operators-nlclt" Nov 24 08:49:02 crc kubenswrapper[4718]: I1124 
08:49:02.666470 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-utilities\") pod \"certified-operators-nlclt\" (UID: \"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c\") " pod="openshift-marketplace/certified-operators-nlclt" Nov 24 08:49:02 crc kubenswrapper[4718]: I1124 08:49:02.685883 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xl4p\" (UniqueName: \"kubernetes.io/projected/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-kube-api-access-4xl4p\") pod \"certified-operators-nlclt\" (UID: \"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c\") " pod="openshift-marketplace/certified-operators-nlclt" Nov 24 08:49:02 crc kubenswrapper[4718]: I1124 08:49:02.808162 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nlclt" Nov 24 08:49:03 crc kubenswrapper[4718]: I1124 08:49:03.252221 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nlclt"] Nov 24 08:49:03 crc kubenswrapper[4718]: W1124 08:49:03.254806 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfbb70f3a_eb62_4086_aa6d_b1b3c0a1c58c.slice/crio-1994b9a415473a855c1eb41a2874826d9ccf0d5290913b974c03e17552161b61 WatchSource:0}: Error finding container 1994b9a415473a855c1eb41a2874826d9ccf0d5290913b974c03e17552161b61: Status 404 returned error can't find the container with id 1994b9a415473a855c1eb41a2874826d9ccf0d5290913b974c03e17552161b61 Nov 24 08:49:03 crc kubenswrapper[4718]: I1124 08:49:03.784835 4718 generic.go:334] "Generic (PLEG): container finished" podID="fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c" containerID="dcec4e4671e8e318e4ed24d6191d937b680ea0e4376f4925b1d565617ce1f435" exitCode=0 Nov 24 08:49:03 crc kubenswrapper[4718]: I1124 08:49:03.784885 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nlclt" event={"ID":"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c","Type":"ContainerDied","Data":"dcec4e4671e8e318e4ed24d6191d937b680ea0e4376f4925b1d565617ce1f435"} Nov 24 08:49:03 crc kubenswrapper[4718]: I1124 08:49:03.785100 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nlclt" event={"ID":"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c","Type":"ContainerStarted","Data":"1994b9a415473a855c1eb41a2874826d9ccf0d5290913b974c03e17552161b61"} Nov 24 08:49:04 crc kubenswrapper[4718]: I1124 08:49:04.795208 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nlclt" event={"ID":"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c","Type":"ContainerStarted","Data":"ae7cef4270429791df476dbbddfd7848cd2747f328494b1739741519cffcedbd"} Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.210339 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bnpnc" Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.210671 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bnpnc" Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.247681 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bnpnc" Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.329946 4718 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk"] Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.331307 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.333447 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-fq6vw" Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.341676 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk"] Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.499585 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/595c017b-b3a6-41ef-aaba-4aa42c28da88-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk\" (UID: \"595c017b-b3a6-41ef-aaba-4aa42c28da88\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.499660 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/595c017b-b3a6-41ef-aaba-4aa42c28da88-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk\" (UID: \"595c017b-b3a6-41ef-aaba-4aa42c28da88\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.499753 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whdjc\" (UniqueName: \"kubernetes.io/projected/595c017b-b3a6-41ef-aaba-4aa42c28da88-kube-api-access-whdjc\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk\" (UID: \"595c017b-b3a6-41ef-aaba-4aa42c28da88\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.601220 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whdjc\" (UniqueName: \"kubernetes.io/projected/595c017b-b3a6-41ef-aaba-4aa42c28da88-kube-api-access-whdjc\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk\" (UID: \"595c017b-b3a6-41ef-aaba-4aa42c28da88\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.601302 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/595c017b-b3a6-41ef-aaba-4aa42c28da88-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk\" (UID: \"595c017b-b3a6-41ef-aaba-4aa42c28da88\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.601341 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/595c017b-b3a6-41ef-aaba-4aa42c28da88-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk\" (UID: \"595c017b-b3a6-41ef-aaba-4aa42c28da88\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" Nov 24 08:49:05 
crc kubenswrapper[4718]: I1124 08:49:05.601788 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/595c017b-b3a6-41ef-aaba-4aa42c28da88-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk\" (UID: \"595c017b-b3a6-41ef-aaba-4aa42c28da88\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.601929 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/595c017b-b3a6-41ef-aaba-4aa42c28da88-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk\" (UID: \"595c017b-b3a6-41ef-aaba-4aa42c28da88\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.624279 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whdjc\" (UniqueName: \"kubernetes.io/projected/595c017b-b3a6-41ef-aaba-4aa42c28da88-kube-api-access-whdjc\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk\" (UID: \"595c017b-b3a6-41ef-aaba-4aa42c28da88\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.648794 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.804707 4718 generic.go:334] "Generic (PLEG): container finished" podID="fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c" containerID="ae7cef4270429791df476dbbddfd7848cd2747f328494b1739741519cffcedbd" exitCode=0 Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.805346 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nlclt" event={"ID":"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c","Type":"ContainerDied","Data":"ae7cef4270429791df476dbbddfd7848cd2747f328494b1739741519cffcedbd"} Nov 24 08:49:05 crc kubenswrapper[4718]: I1124 08:49:05.861988 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bnpnc" Nov 24 08:49:06 crc kubenswrapper[4718]: I1124 08:49:06.067376 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk"] Nov 24 08:49:06 crc kubenswrapper[4718]: W1124 08:49:06.087164 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod595c017b_b3a6_41ef_aaba_4aa42c28da88.slice/crio-6a2400d0573e88260f83d1036859cb8e92d2087e26a32c831c05ea72f469a1b1 WatchSource:0}: Error finding container 6a2400d0573e88260f83d1036859cb8e92d2087e26a32c831c05ea72f469a1b1: Status 404 returned error can't find the container with id 6a2400d0573e88260f83d1036859cb8e92d2087e26a32c831c05ea72f469a1b1 Nov 24 08:49:06 crc kubenswrapper[4718]: I1124 08:49:06.812489 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nlclt" event={"ID":"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c","Type":"ContainerStarted","Data":"38060b929957ce7aca595f1bb231f6a27aacf36a11cfdf853ab3ceb8c3286920"} Nov 24 08:49:06 crc kubenswrapper[4718]: I1124 08:49:06.814665 4718 generic.go:334] "Generic (PLEG): container finished" 
podID="595c017b-b3a6-41ef-aaba-4aa42c28da88" containerID="b0ace923be1a15b0d8d7808899dacef569df5287d8ed553ada9fc9f9a5ce31ab" exitCode=0 Nov 24 08:49:06 crc kubenswrapper[4718]: I1124 08:49:06.814752 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" event={"ID":"595c017b-b3a6-41ef-aaba-4aa42c28da88","Type":"ContainerDied","Data":"b0ace923be1a15b0d8d7808899dacef569df5287d8ed553ada9fc9f9a5ce31ab"} Nov 24 08:49:06 crc kubenswrapper[4718]: I1124 08:49:06.814783 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" event={"ID":"595c017b-b3a6-41ef-aaba-4aa42c28da88","Type":"ContainerStarted","Data":"6a2400d0573e88260f83d1036859cb8e92d2087e26a32c831c05ea72f469a1b1"} Nov 24 08:49:06 crc kubenswrapper[4718]: I1124 08:49:06.836838 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nlclt" podStartSLOduration=2.416650825 podStartE2EDuration="4.836813396s" podCreationTimestamp="2025-11-24 08:49:02 +0000 UTC" firstStartedPulling="2025-11-24 08:49:03.786386994 +0000 UTC m=+815.902677898" lastFinishedPulling="2025-11-24 08:49:06.206549565 +0000 UTC m=+818.322840469" observedRunningTime="2025-11-24 08:49:06.833812208 +0000 UTC m=+818.950103102" watchObservedRunningTime="2025-11-24 08:49:06.836813396 +0000 UTC m=+818.953104300" Nov 24 08:49:07 crc kubenswrapper[4718]: I1124 08:49:07.822958 4718 generic.go:334] "Generic (PLEG): container finished" podID="595c017b-b3a6-41ef-aaba-4aa42c28da88" containerID="f208164e0b5db2b501f4dca1b6f2d54c4a18d53a9fe6e9281774908a331905ba" exitCode=0 Nov 24 08:49:07 crc kubenswrapper[4718]: I1124 08:49:07.823024 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" event={"ID":"595c017b-b3a6-41ef-aaba-4aa42c28da88","Type":"ContainerDied","Data":"f208164e0b5db2b501f4dca1b6f2d54c4a18d53a9fe6e9281774908a331905ba"} Nov 24 08:49:08 crc kubenswrapper[4718]: I1124 08:49:08.830959 4718 generic.go:334] "Generic (PLEG): container finished" podID="595c017b-b3a6-41ef-aaba-4aa42c28da88" containerID="a8a93999aff36f4737da412de842514f7d410f8c89b82e185cbd71586029df0a" exitCode=0 Nov 24 08:49:08 crc kubenswrapper[4718]: I1124 08:49:08.831020 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" event={"ID":"595c017b-b3a6-41ef-aaba-4aa42c28da88","Type":"ContainerDied","Data":"a8a93999aff36f4737da412de842514f7d410f8c89b82e185cbd71586029df0a"} Nov 24 08:49:10 crc kubenswrapper[4718]: I1124 08:49:10.125083 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" Nov 24 08:49:10 crc kubenswrapper[4718]: I1124 08:49:10.256849 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/595c017b-b3a6-41ef-aaba-4aa42c28da88-bundle\") pod \"595c017b-b3a6-41ef-aaba-4aa42c28da88\" (UID: \"595c017b-b3a6-41ef-aaba-4aa42c28da88\") " Nov 24 08:49:10 crc kubenswrapper[4718]: I1124 08:49:10.257068 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whdjc\" (UniqueName: \"kubernetes.io/projected/595c017b-b3a6-41ef-aaba-4aa42c28da88-kube-api-access-whdjc\") pod \"595c017b-b3a6-41ef-aaba-4aa42c28da88\" (UID: \"595c017b-b3a6-41ef-aaba-4aa42c28da88\") " Nov 24 08:49:10 crc kubenswrapper[4718]: I1124 08:49:10.257140 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/595c017b-b3a6-41ef-aaba-4aa42c28da88-util\") pod \"595c017b-b3a6-41ef-aaba-4aa42c28da88\" (UID: \"595c017b-b3a6-41ef-aaba-4aa42c28da88\") " Nov 24 08:49:10 crc kubenswrapper[4718]: I1124 08:49:10.258099 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/595c017b-b3a6-41ef-aaba-4aa42c28da88-bundle" (OuterVolumeSpecName: "bundle") pod "595c017b-b3a6-41ef-aaba-4aa42c28da88" (UID: "595c017b-b3a6-41ef-aaba-4aa42c28da88"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:49:10 crc kubenswrapper[4718]: I1124 08:49:10.262451 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/595c017b-b3a6-41ef-aaba-4aa42c28da88-kube-api-access-whdjc" (OuterVolumeSpecName: "kube-api-access-whdjc") pod "595c017b-b3a6-41ef-aaba-4aa42c28da88" (UID: "595c017b-b3a6-41ef-aaba-4aa42c28da88"). InnerVolumeSpecName "kube-api-access-whdjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:49:10 crc kubenswrapper[4718]: I1124 08:49:10.273591 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/595c017b-b3a6-41ef-aaba-4aa42c28da88-util" (OuterVolumeSpecName: "util") pod "595c017b-b3a6-41ef-aaba-4aa42c28da88" (UID: "595c017b-b3a6-41ef-aaba-4aa42c28da88"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:49:10 crc kubenswrapper[4718]: I1124 08:49:10.359313 4718 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/595c017b-b3a6-41ef-aaba-4aa42c28da88-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:49:10 crc kubenswrapper[4718]: I1124 08:49:10.359654 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whdjc\" (UniqueName: \"kubernetes.io/projected/595c017b-b3a6-41ef-aaba-4aa42c28da88-kube-api-access-whdjc\") on node \"crc\" DevicePath \"\"" Nov 24 08:49:10 crc kubenswrapper[4718]: I1124 08:49:10.359670 4718 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/595c017b-b3a6-41ef-aaba-4aa42c28da88-util\") on node \"crc\" DevicePath \"\"" Nov 24 08:49:10 crc kubenswrapper[4718]: I1124 08:49:10.845568 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" event={"ID":"595c017b-b3a6-41ef-aaba-4aa42c28da88","Type":"ContainerDied","Data":"6a2400d0573e88260f83d1036859cb8e92d2087e26a32c831c05ea72f469a1b1"} Nov 24 08:49:10 crc kubenswrapper[4718]: I1124 08:49:10.845618 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a2400d0573e88260f83d1036859cb8e92d2087e26a32c831c05ea72f469a1b1" Nov 24 08:49:10 crc kubenswrapper[4718]: I1124 08:49:10.845629 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk" Nov 24 08:49:11 crc kubenswrapper[4718]: I1124 08:49:11.475846 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bnpnc"] Nov 24 08:49:11 crc kubenswrapper[4718]: I1124 08:49:11.476103 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bnpnc" podUID="9b0a7b87-6605-48e4-aff1-8393159231ff" containerName="registry-server" containerID="cri-o://9f1b87e951d0566d5276682b0f880b53153b1bdd6cdbc7326c8bad01cf97fa7d" gracePeriod=2 Nov 24 08:49:11 crc kubenswrapper[4718]: I1124 08:49:11.854629 4718 generic.go:334] "Generic (PLEG): container finished" podID="9b0a7b87-6605-48e4-aff1-8393159231ff" containerID="9f1b87e951d0566d5276682b0f880b53153b1bdd6cdbc7326c8bad01cf97fa7d" exitCode=0 Nov 24 08:49:11 crc kubenswrapper[4718]: I1124 08:49:11.854729 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bnpnc" event={"ID":"9b0a7b87-6605-48e4-aff1-8393159231ff","Type":"ContainerDied","Data":"9f1b87e951d0566d5276682b0f880b53153b1bdd6cdbc7326c8bad01cf97fa7d"} Nov 24 08:49:11 crc kubenswrapper[4718]: I1124 08:49:11.855009 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bnpnc" event={"ID":"9b0a7b87-6605-48e4-aff1-8393159231ff","Type":"ContainerDied","Data":"8f41a70277bc67ecb5545b831c14cadb510a5e8a773e81f6ea32096461b05fa9"} Nov 24 08:49:11 crc kubenswrapper[4718]: I1124 08:49:11.855029 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f41a70277bc67ecb5545b831c14cadb510a5e8a773e81f6ea32096461b05fa9" Nov 24 08:49:11 crc kubenswrapper[4718]: I1124 08:49:11.874922 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bnpnc" Nov 24 08:49:11 crc kubenswrapper[4718]: I1124 08:49:11.981422 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b0a7b87-6605-48e4-aff1-8393159231ff-catalog-content\") pod \"9b0a7b87-6605-48e4-aff1-8393159231ff\" (UID: \"9b0a7b87-6605-48e4-aff1-8393159231ff\") " Nov 24 08:49:11 crc kubenswrapper[4718]: I1124 08:49:11.981493 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b0a7b87-6605-48e4-aff1-8393159231ff-utilities\") pod \"9b0a7b87-6605-48e4-aff1-8393159231ff\" (UID: \"9b0a7b87-6605-48e4-aff1-8393159231ff\") " Nov 24 08:49:11 crc kubenswrapper[4718]: I1124 08:49:11.981547 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnvrq\" (UniqueName: \"kubernetes.io/projected/9b0a7b87-6605-48e4-aff1-8393159231ff-kube-api-access-xnvrq\") pod \"9b0a7b87-6605-48e4-aff1-8393159231ff\" (UID: \"9b0a7b87-6605-48e4-aff1-8393159231ff\") " Nov 24 08:49:11 crc kubenswrapper[4718]: I1124 08:49:11.982447 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b0a7b87-6605-48e4-aff1-8393159231ff-utilities" (OuterVolumeSpecName: "utilities") pod "9b0a7b87-6605-48e4-aff1-8393159231ff" (UID: "9b0a7b87-6605-48e4-aff1-8393159231ff"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:49:11 crc kubenswrapper[4718]: I1124 08:49:11.987115 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b0a7b87-6605-48e4-aff1-8393159231ff-kube-api-access-xnvrq" (OuterVolumeSpecName: "kube-api-access-xnvrq") pod "9b0a7b87-6605-48e4-aff1-8393159231ff" (UID: "9b0a7b87-6605-48e4-aff1-8393159231ff"). InnerVolumeSpecName "kube-api-access-xnvrq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:49:12 crc kubenswrapper[4718]: I1124 08:49:12.034789 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b0a7b87-6605-48e4-aff1-8393159231ff-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9b0a7b87-6605-48e4-aff1-8393159231ff" (UID: "9b0a7b87-6605-48e4-aff1-8393159231ff"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:49:12 crc kubenswrapper[4718]: I1124 08:49:12.083333 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b0a7b87-6605-48e4-aff1-8393159231ff-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 08:49:12 crc kubenswrapper[4718]: I1124 08:49:12.083375 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b0a7b87-6605-48e4-aff1-8393159231ff-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 08:49:12 crc kubenswrapper[4718]: I1124 08:49:12.083387 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnvrq\" (UniqueName: \"kubernetes.io/projected/9b0a7b87-6605-48e4-aff1-8393159231ff-kube-api-access-xnvrq\") on node \"crc\" DevicePath \"\"" Nov 24 08:49:12 crc kubenswrapper[4718]: I1124 08:49:12.810010 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nlclt" Nov 24 08:49:12 crc kubenswrapper[4718]: I1124 08:49:12.810074 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nlclt" Nov 24 08:49:12 crc kubenswrapper[4718]: I1124 08:49:12.852703 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nlclt" Nov 24 08:49:12 crc kubenswrapper[4718]: I1124 08:49:12.859683 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bnpnc" Nov 24 08:49:12 crc kubenswrapper[4718]: I1124 08:49:12.891691 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bnpnc"] Nov 24 08:49:12 crc kubenswrapper[4718]: I1124 08:49:12.895393 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bnpnc"] Nov 24 08:49:12 crc kubenswrapper[4718]: I1124 08:49:12.904921 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nlclt" Nov 24 08:49:14 crc kubenswrapper[4718]: I1124 08:49:14.603453 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b0a7b87-6605-48e4-aff1-8393159231ff" path="/var/lib/kubelet/pods/9b0a7b87-6605-48e4-aff1-8393159231ff/volumes" Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.278607 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nlclt"] Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.279199 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nlclt" podUID="fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c" containerName="registry-server" containerID="cri-o://38060b929957ce7aca595f1bb231f6a27aacf36a11cfdf853ab3ceb8c3286920" gracePeriod=2 Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.676473 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nlclt" Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.794556 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-catalog-content\") pod \"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c\" (UID: \"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c\") " Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.794685 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-utilities\") pod \"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c\" (UID: \"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c\") " Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.794736 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xl4p\" (UniqueName: \"kubernetes.io/projected/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-kube-api-access-4xl4p\") pod \"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c\" (UID: \"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c\") " Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.795460 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-utilities" (OuterVolumeSpecName: "utilities") pod "fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c" (UID: "fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.801392 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-kube-api-access-4xl4p" (OuterVolumeSpecName: "kube-api-access-4xl4p") pod "fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c" (UID: "fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c"). InnerVolumeSpecName "kube-api-access-4xl4p". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.847302 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c" (UID: "fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.896684 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xl4p\" (UniqueName: \"kubernetes.io/projected/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-kube-api-access-4xl4p\") on node \"crc\" DevicePath \"\"" Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.896729 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.896742 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.920056 4718 generic.go:334] "Generic (PLEG): container finished" podID="fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c" containerID="38060b929957ce7aca595f1bb231f6a27aacf36a11cfdf853ab3ceb8c3286920" exitCode=0 Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.920094 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nlclt" event={"ID":"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c","Type":"ContainerDied","Data":"38060b929957ce7aca595f1bb231f6a27aacf36a11cfdf853ab3ceb8c3286920"} Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.920124 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nlclt" event={"ID":"fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c","Type":"ContainerDied","Data":"1994b9a415473a855c1eb41a2874826d9ccf0d5290913b974c03e17552161b61"} Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.920142 4718 scope.go:117] "RemoveContainer" containerID="38060b929957ce7aca595f1bb231f6a27aacf36a11cfdf853ab3ceb8c3286920" Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.920175 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nlclt" Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.939935 4718 scope.go:117] "RemoveContainer" containerID="ae7cef4270429791df476dbbddfd7848cd2747f328494b1739741519cffcedbd" Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.950756 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nlclt"] Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.955375 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nlclt"] Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.975444 4718 scope.go:117] "RemoveContainer" containerID="dcec4e4671e8e318e4ed24d6191d937b680ea0e4376f4925b1d565617ce1f435" Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.992819 4718 scope.go:117] "RemoveContainer" containerID="38060b929957ce7aca595f1bb231f6a27aacf36a11cfdf853ab3ceb8c3286920" Nov 24 08:49:20 crc kubenswrapper[4718]: E1124 08:49:20.993387 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38060b929957ce7aca595f1bb231f6a27aacf36a11cfdf853ab3ceb8c3286920\": container with ID starting with 38060b929957ce7aca595f1bb231f6a27aacf36a11cfdf853ab3ceb8c3286920 not found: ID does not exist" containerID="38060b929957ce7aca595f1bb231f6a27aacf36a11cfdf853ab3ceb8c3286920" Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.993459 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38060b929957ce7aca595f1bb231f6a27aacf36a11cfdf853ab3ceb8c3286920"} err="failed to get container status \"38060b929957ce7aca595f1bb231f6a27aacf36a11cfdf853ab3ceb8c3286920\": rpc error: code = NotFound desc = could not find container \"38060b929957ce7aca595f1bb231f6a27aacf36a11cfdf853ab3ceb8c3286920\": container with ID starting with 38060b929957ce7aca595f1bb231f6a27aacf36a11cfdf853ab3ceb8c3286920 not found: ID does not exist" Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.993507 4718 scope.go:117] "RemoveContainer" containerID="ae7cef4270429791df476dbbddfd7848cd2747f328494b1739741519cffcedbd" Nov 24 08:49:20 crc kubenswrapper[4718]: E1124 08:49:20.993992 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae7cef4270429791df476dbbddfd7848cd2747f328494b1739741519cffcedbd\": container with ID starting with ae7cef4270429791df476dbbddfd7848cd2747f328494b1739741519cffcedbd not found: ID does not exist" containerID="ae7cef4270429791df476dbbddfd7848cd2747f328494b1739741519cffcedbd" Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.994045 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae7cef4270429791df476dbbddfd7848cd2747f328494b1739741519cffcedbd"} err="failed to get container status \"ae7cef4270429791df476dbbddfd7848cd2747f328494b1739741519cffcedbd\": rpc error: code = NotFound desc = could not find container \"ae7cef4270429791df476dbbddfd7848cd2747f328494b1739741519cffcedbd\": container with ID starting with ae7cef4270429791df476dbbddfd7848cd2747f328494b1739741519cffcedbd not found: ID does not exist" Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.994075 4718 scope.go:117] "RemoveContainer" containerID="dcec4e4671e8e318e4ed24d6191d937b680ea0e4376f4925b1d565617ce1f435" Nov 24 08:49:20 crc kubenswrapper[4718]: E1124 08:49:20.994360 4718 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"dcec4e4671e8e318e4ed24d6191d937b680ea0e4376f4925b1d565617ce1f435\": container with ID starting with dcec4e4671e8e318e4ed24d6191d937b680ea0e4376f4925b1d565617ce1f435 not found: ID does not exist" containerID="dcec4e4671e8e318e4ed24d6191d937b680ea0e4376f4925b1d565617ce1f435" Nov 24 08:49:20 crc kubenswrapper[4718]: I1124 08:49:20.994408 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcec4e4671e8e318e4ed24d6191d937b680ea0e4376f4925b1d565617ce1f435"} err="failed to get container status \"dcec4e4671e8e318e4ed24d6191d937b680ea0e4376f4925b1d565617ce1f435\": rpc error: code = NotFound desc = could not find container \"dcec4e4671e8e318e4ed24d6191d937b680ea0e4376f4925b1d565617ce1f435\": container with ID starting with dcec4e4671e8e318e4ed24d6191d937b680ea0e4376f4925b1d565617ce1f435 not found: ID does not exist" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.373661 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-zndb4"] Nov 24 08:49:21 crc kubenswrapper[4718]: E1124 08:49:21.373883 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b0a7b87-6605-48e4-aff1-8393159231ff" containerName="extract-content" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.373895 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b0a7b87-6605-48e4-aff1-8393159231ff" containerName="extract-content" Nov 24 08:49:21 crc kubenswrapper[4718]: E1124 08:49:21.373904 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b0a7b87-6605-48e4-aff1-8393159231ff" containerName="extract-utilities" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.373910 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b0a7b87-6605-48e4-aff1-8393159231ff" containerName="extract-utilities" Nov 24 08:49:21 crc kubenswrapper[4718]: E1124 08:49:21.373917 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="595c017b-b3a6-41ef-aaba-4aa42c28da88" containerName="extract" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.373924 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="595c017b-b3a6-41ef-aaba-4aa42c28da88" containerName="extract" Nov 24 08:49:21 crc kubenswrapper[4718]: E1124 08:49:21.373936 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c" containerName="registry-server" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.373941 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c" containerName="registry-server" Nov 24 08:49:21 crc kubenswrapper[4718]: E1124 08:49:21.373949 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b0a7b87-6605-48e4-aff1-8393159231ff" containerName="registry-server" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.373956 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b0a7b87-6605-48e4-aff1-8393159231ff" containerName="registry-server" Nov 24 08:49:21 crc kubenswrapper[4718]: E1124 08:49:21.373962 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="595c017b-b3a6-41ef-aaba-4aa42c28da88" containerName="pull" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.373989 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="595c017b-b3a6-41ef-aaba-4aa42c28da88" containerName="pull" Nov 24 08:49:21 crc kubenswrapper[4718]: E1124 08:49:21.374002 4718 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c" containerName="extract-content" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.374010 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c" containerName="extract-content" Nov 24 08:49:21 crc kubenswrapper[4718]: E1124 08:49:21.374023 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="595c017b-b3a6-41ef-aaba-4aa42c28da88" containerName="util" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.374030 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="595c017b-b3a6-41ef-aaba-4aa42c28da88" containerName="util" Nov 24 08:49:21 crc kubenswrapper[4718]: E1124 08:49:21.374044 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c" containerName="extract-utilities" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.374051 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c" containerName="extract-utilities" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.374154 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="595c017b-b3a6-41ef-aaba-4aa42c28da88" containerName="extract" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.374165 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c" containerName="registry-server" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.374178 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b0a7b87-6605-48e4-aff1-8393159231ff" containerName="registry-server" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.374580 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-zndb4" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.386408 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-zndb4"] Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.387313 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-dockercfg-4nv67" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.505687 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf6jf\" (UniqueName: \"kubernetes.io/projected/b7b86409-7678-4f44-b87a-09837009fe67-kube-api-access-zf6jf\") pod \"rabbitmq-cluster-operator-779fc9694b-zndb4\" (UID: \"b7b86409-7678-4f44-b87a-09837009fe67\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-zndb4" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.607160 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf6jf\" (UniqueName: \"kubernetes.io/projected/b7b86409-7678-4f44-b87a-09837009fe67-kube-api-access-zf6jf\") pod \"rabbitmq-cluster-operator-779fc9694b-zndb4\" (UID: \"b7b86409-7678-4f44-b87a-09837009fe67\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-zndb4" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.633727 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf6jf\" (UniqueName: \"kubernetes.io/projected/b7b86409-7678-4f44-b87a-09837009fe67-kube-api-access-zf6jf\") pod \"rabbitmq-cluster-operator-779fc9694b-zndb4\" (UID: \"b7b86409-7678-4f44-b87a-09837009fe67\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-zndb4" Nov 24 08:49:21 crc kubenswrapper[4718]: I1124 08:49:21.688761 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-zndb4" Nov 24 08:49:22 crc kubenswrapper[4718]: I1124 08:49:22.094759 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-zndb4"] Nov 24 08:49:22 crc kubenswrapper[4718]: I1124 08:49:22.603985 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c" path="/var/lib/kubelet/pods/fbb70f3a-eb62-4086-aa6d-b1b3c0a1c58c/volumes" Nov 24 08:49:22 crc kubenswrapper[4718]: I1124 08:49:22.933713 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-zndb4" event={"ID":"b7b86409-7678-4f44-b87a-09837009fe67","Type":"ContainerStarted","Data":"31973a0f4213e7e84df2080780107395d029d2cb829fcd0ee23ee0af6493760b"} Nov 24 08:49:26 crc kubenswrapper[4718]: I1124 08:49:26.957054 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-zndb4" event={"ID":"b7b86409-7678-4f44-b87a-09837009fe67","Type":"ContainerStarted","Data":"23793fd1bf32a7fe1159061ffe6d70aff636f2889d89ff3c3538285b24ceaf5d"} Nov 24 08:49:26 crc kubenswrapper[4718]: I1124 08:49:26.978519 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-zndb4" podStartSLOduration=1.822710093 podStartE2EDuration="5.978496278s" podCreationTimestamp="2025-11-24 08:49:21 +0000 UTC" firstStartedPulling="2025-11-24 08:49:22.104981335 +0000 UTC m=+834.221272239" lastFinishedPulling="2025-11-24 08:49:26.26076752 +0000 UTC m=+838.377058424" observedRunningTime="2025-11-24 08:49:26.973287771 +0000 UTC m=+839.089578685" watchObservedRunningTime="2025-11-24 08:49:26.978496278 +0000 UTC m=+839.094787182" Nov 24 08:49:29 crc kubenswrapper[4718]: I1124 08:49:29.085254 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cjs4x"] Nov 24 08:49:29 crc kubenswrapper[4718]: I1124 08:49:29.086779 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cjs4x" Nov 24 08:49:29 crc kubenswrapper[4718]: I1124 08:49:29.102988 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cjs4x"] Nov 24 08:49:29 crc kubenswrapper[4718]: I1124 08:49:29.213485 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-catalog-content\") pod \"redhat-operators-cjs4x\" (UID: \"221d7f7c-5799-4b9b-8f7c-4fa1876eb113\") " pod="openshift-marketplace/redhat-operators-cjs4x" Nov 24 08:49:29 crc kubenswrapper[4718]: I1124 08:49:29.213542 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9qwq\" (UniqueName: \"kubernetes.io/projected/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-kube-api-access-k9qwq\") pod \"redhat-operators-cjs4x\" (UID: \"221d7f7c-5799-4b9b-8f7c-4fa1876eb113\") " pod="openshift-marketplace/redhat-operators-cjs4x" Nov 24 08:49:29 crc kubenswrapper[4718]: I1124 08:49:29.213563 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-utilities\") pod \"redhat-operators-cjs4x\" (UID: \"221d7f7c-5799-4b9b-8f7c-4fa1876eb113\") " pod="openshift-marketplace/redhat-operators-cjs4x" Nov 24 08:49:29 crc kubenswrapper[4718]: I1124 08:49:29.315116 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9qwq\" (UniqueName: \"kubernetes.io/projected/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-kube-api-access-k9qwq\") pod \"redhat-operators-cjs4x\" (UID: \"221d7f7c-5799-4b9b-8f7c-4fa1876eb113\") " pod="openshift-marketplace/redhat-operators-cjs4x" Nov 24 08:49:29 crc kubenswrapper[4718]: I1124 08:49:29.315174 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-utilities\") pod \"redhat-operators-cjs4x\" (UID: \"221d7f7c-5799-4b9b-8f7c-4fa1876eb113\") " pod="openshift-marketplace/redhat-operators-cjs4x" Nov 24 08:49:29 crc kubenswrapper[4718]: I1124 08:49:29.315276 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-catalog-content\") pod \"redhat-operators-cjs4x\" (UID: \"221d7f7c-5799-4b9b-8f7c-4fa1876eb113\") " pod="openshift-marketplace/redhat-operators-cjs4x" Nov 24 08:49:29 crc kubenswrapper[4718]: I1124 08:49:29.315826 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-catalog-content\") pod \"redhat-operators-cjs4x\" (UID: \"221d7f7c-5799-4b9b-8f7c-4fa1876eb113\") " pod="openshift-marketplace/redhat-operators-cjs4x" Nov 24 08:49:29 crc kubenswrapper[4718]: I1124 08:49:29.315908 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-utilities\") pod \"redhat-operators-cjs4x\" (UID: \"221d7f7c-5799-4b9b-8f7c-4fa1876eb113\") " pod="openshift-marketplace/redhat-operators-cjs4x" Nov 24 08:49:29 crc kubenswrapper[4718]: I1124 08:49:29.333409 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-k9qwq\" (UniqueName: \"kubernetes.io/projected/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-kube-api-access-k9qwq\") pod \"redhat-operators-cjs4x\" (UID: \"221d7f7c-5799-4b9b-8f7c-4fa1876eb113\") " pod="openshift-marketplace/redhat-operators-cjs4x" Nov 24 08:49:29 crc kubenswrapper[4718]: I1124 08:49:29.404412 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cjs4x" Nov 24 08:49:29 crc kubenswrapper[4718]: I1124 08:49:29.619027 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cjs4x"] Nov 24 08:49:29 crc kubenswrapper[4718]: I1124 08:49:29.978642 4718 generic.go:334] "Generic (PLEG): container finished" podID="221d7f7c-5799-4b9b-8f7c-4fa1876eb113" containerID="7270b4333a0621848c0ea766663cf9c0683a10711127fa6bba0712064ac5d763" exitCode=0 Nov 24 08:49:29 crc kubenswrapper[4718]: I1124 08:49:29.978720 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjs4x" event={"ID":"221d7f7c-5799-4b9b-8f7c-4fa1876eb113","Type":"ContainerDied","Data":"7270b4333a0621848c0ea766663cf9c0683a10711127fa6bba0712064ac5d763"} Nov 24 08:49:29 crc kubenswrapper[4718]: I1124 08:49:29.979171 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjs4x" event={"ID":"221d7f7c-5799-4b9b-8f7c-4fa1876eb113","Type":"ContainerStarted","Data":"adb6cfad314a52c210a3fb3c77c2fa7ac05dfedd0322e9fe16c029579d30a7b8"} Nov 24 08:49:31 crc kubenswrapper[4718]: I1124 08:49:31.995370 4718 generic.go:334] "Generic (PLEG): container finished" podID="221d7f7c-5799-4b9b-8f7c-4fa1876eb113" containerID="f51f36488a4ce4bde0089e301c09869a204639b87e1ff604649da74bbfbb5f0f" exitCode=0 Nov 24 08:49:31 crc kubenswrapper[4718]: I1124 08:49:31.996172 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjs4x" event={"ID":"221d7f7c-5799-4b9b-8f7c-4fa1876eb113","Type":"ContainerDied","Data":"f51f36488a4ce4bde0089e301c09869a204639b87e1ff604649da74bbfbb5f0f"} Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.003444 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjs4x" event={"ID":"221d7f7c-5799-4b9b-8f7c-4fa1876eb113","Type":"ContainerStarted","Data":"21697a824857c85d5d94c0e4149f9c441e0ada9f12c449287c026283a42c71a4"} Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.021462 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cjs4x" podStartSLOduration=1.5859154640000002 podStartE2EDuration="4.021440732s" podCreationTimestamp="2025-11-24 08:49:29 +0000 UTC" firstStartedPulling="2025-11-24 08:49:29.980173518 +0000 UTC m=+842.096464422" lastFinishedPulling="2025-11-24 08:49:32.415698786 +0000 UTC m=+844.531989690" observedRunningTime="2025-11-24 08:49:33.018246345 +0000 UTC m=+845.134537259" watchObservedRunningTime="2025-11-24 08:49:33.021440732 +0000 UTC m=+845.137731626" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.239174 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/rabbitmq-server-0"] Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.240834 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.243087 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"rabbitmq-erlang-cookie" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.243349 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"rabbitmq-server-dockercfg-78n5q" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.243523 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"rabbitmq-server-conf" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.243657 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"rabbitmq-plugins-conf" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.243868 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"rabbitmq-default-user" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.257285 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/rabbitmq-server-0"] Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.377767 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-2f7885df-139d-4ac4-b27f-d675b8887814\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2f7885df-139d-4ac4-b27f-d675b8887814\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.377818 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/71536c12-d65b-479d-b35f-43579c0c2e91-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.377863 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/71536c12-d65b-479d-b35f-43579c0c2e91-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.377905 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/71536c12-d65b-479d-b35f-43579c0c2e91-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.378046 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhplh\" (UniqueName: \"kubernetes.io/projected/71536c12-d65b-479d-b35f-43579c0c2e91-kube-api-access-bhplh\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.378096 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/71536c12-d65b-479d-b35f-43579c0c2e91-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " 
pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.378191 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/71536c12-d65b-479d-b35f-43579c0c2e91-pod-info\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.378253 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/71536c12-d65b-479d-b35f-43579c0c2e91-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.480029 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/71536c12-d65b-479d-b35f-43579c0c2e91-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.480127 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-2f7885df-139d-4ac4-b27f-d675b8887814\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2f7885df-139d-4ac4-b27f-d675b8887814\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.480160 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/71536c12-d65b-479d-b35f-43579c0c2e91-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.480211 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/71536c12-d65b-479d-b35f-43579c0c2e91-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.480242 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/71536c12-d65b-479d-b35f-43579c0c2e91-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.480281 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhplh\" (UniqueName: \"kubernetes.io/projected/71536c12-d65b-479d-b35f-43579c0c2e91-kube-api-access-bhplh\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.480303 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/71536c12-d65b-479d-b35f-43579c0c2e91-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" 
Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.480325 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/71536c12-d65b-479d-b35f-43579c0c2e91-pod-info\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.480876 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/71536c12-d65b-479d-b35f-43579c0c2e91-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.481114 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/71536c12-d65b-479d-b35f-43579c0c2e91-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.481757 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/71536c12-d65b-479d-b35f-43579c0c2e91-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.482879 4718 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.482910 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-2f7885df-139d-4ac4-b27f-d675b8887814\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2f7885df-139d-4ac4-b27f-d675b8887814\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f72538138601ce45fa9691d68279552267409b30fb1032bf45b36e6c03241262/globalmount\"" pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.486091 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/71536c12-d65b-479d-b35f-43579c0c2e91-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.486183 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/71536c12-d65b-479d-b35f-43579c0c2e91-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.486246 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/71536c12-d65b-479d-b35f-43579c0c2e91-pod-info\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.497934 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhplh\" (UniqueName: 
\"kubernetes.io/projected/71536c12-d65b-479d-b35f-43579c0c2e91-kube-api-access-bhplh\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.510396 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-2f7885df-139d-4ac4-b27f-d675b8887814\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2f7885df-139d-4ac4-b27f-d675b8887814\") pod \"rabbitmq-server-0\" (UID: \"71536c12-d65b-479d-b35f-43579c0c2e91\") " pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.562524 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:49:33 crc kubenswrapper[4718]: I1124 08:49:33.984452 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/rabbitmq-server-0"] Nov 24 08:49:33 crc kubenswrapper[4718]: W1124 08:49:33.986027 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71536c12_d65b_479d_b35f_43579c0c2e91.slice/crio-6dca42d2faec8ee1559e3b8dc214777cad78b0fbebbb1c3bba4cb30df6cc3c22 WatchSource:0}: Error finding container 6dca42d2faec8ee1559e3b8dc214777cad78b0fbebbb1c3bba4cb30df6cc3c22: Status 404 returned error can't find the container with id 6dca42d2faec8ee1559e3b8dc214777cad78b0fbebbb1c3bba4cb30df6cc3c22 Nov 24 08:49:34 crc kubenswrapper[4718]: I1124 08:49:34.013253 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/rabbitmq-server-0" event={"ID":"71536c12-d65b-479d-b35f-43579c0c2e91","Type":"ContainerStarted","Data":"6dca42d2faec8ee1559e3b8dc214777cad78b0fbebbb1c3bba4cb30df6cc3c22"} Nov 24 08:49:36 crc kubenswrapper[4718]: I1124 08:49:36.486585 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-wwpx8"] Nov 24 08:49:36 crc kubenswrapper[4718]: I1124 08:49:36.487902 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-wwpx8" Nov 24 08:49:36 crc kubenswrapper[4718]: I1124 08:49:36.490431 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-index-dockercfg-hqqqf" Nov 24 08:49:36 crc kubenswrapper[4718]: I1124 08:49:36.500620 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-wwpx8"] Nov 24 08:49:36 crc kubenswrapper[4718]: I1124 08:49:36.639568 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6b5p\" (UniqueName: \"kubernetes.io/projected/c8548438-fc18-4313-8c6b-06352e15cb59-kube-api-access-b6b5p\") pod \"keystone-operator-index-wwpx8\" (UID: \"c8548438-fc18-4313-8c6b-06352e15cb59\") " pod="openstack-operators/keystone-operator-index-wwpx8" Nov 24 08:49:36 crc kubenswrapper[4718]: I1124 08:49:36.740924 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6b5p\" (UniqueName: \"kubernetes.io/projected/c8548438-fc18-4313-8c6b-06352e15cb59-kube-api-access-b6b5p\") pod \"keystone-operator-index-wwpx8\" (UID: \"c8548438-fc18-4313-8c6b-06352e15cb59\") " pod="openstack-operators/keystone-operator-index-wwpx8" Nov 24 08:49:36 crc kubenswrapper[4718]: I1124 08:49:36.777881 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6b5p\" (UniqueName: \"kubernetes.io/projected/c8548438-fc18-4313-8c6b-06352e15cb59-kube-api-access-b6b5p\") pod \"keystone-operator-index-wwpx8\" (UID: \"c8548438-fc18-4313-8c6b-06352e15cb59\") " pod="openstack-operators/keystone-operator-index-wwpx8" Nov 24 08:49:36 crc kubenswrapper[4718]: I1124 08:49:36.821436 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-wwpx8" Nov 24 08:49:39 crc kubenswrapper[4718]: I1124 08:49:39.130729 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-wwpx8"] Nov 24 08:49:39 crc kubenswrapper[4718]: I1124 08:49:39.405580 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cjs4x" Nov 24 08:49:39 crc kubenswrapper[4718]: I1124 08:49:39.405648 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cjs4x" Nov 24 08:49:39 crc kubenswrapper[4718]: I1124 08:49:39.464368 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cjs4x" Nov 24 08:49:40 crc kubenswrapper[4718]: I1124 08:49:40.058160 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-wwpx8" event={"ID":"c8548438-fc18-4313-8c6b-06352e15cb59","Type":"ContainerStarted","Data":"3928603c7b8fa2255cf65eff200fd32f3f687ee0b0721c09f994b75056636d7c"} Nov 24 08:49:40 crc kubenswrapper[4718]: I1124 08:49:40.098039 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cjs4x" Nov 24 08:49:41 crc kubenswrapper[4718]: I1124 08:49:41.066302 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/rabbitmq-server-0" event={"ID":"71536c12-d65b-479d-b35f-43579c0c2e91","Type":"ContainerStarted","Data":"a996db3a826d10dc6703238571a49c509243a9efed88b40570e24dc0b22bcd10"} Nov 24 08:49:41 crc kubenswrapper[4718]: I1124 08:49:41.476716 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cjs4x"] Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.074229 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-wwpx8" event={"ID":"c8548438-fc18-4313-8c6b-06352e15cb59","Type":"ContainerStarted","Data":"8a918165f9dacc9bd31bfbcc6699afd7c075a73060f96f053b68f852c4148674"} Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.074439 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cjs4x" podUID="221d7f7c-5799-4b9b-8f7c-4fa1876eb113" containerName="registry-server" containerID="cri-o://21697a824857c85d5d94c0e4149f9c441e0ada9f12c449287c026283a42c71a4" gracePeriod=2 Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.085068 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-wwpx8"] Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.094401 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-index-wwpx8" podStartSLOduration=3.966432715 podStartE2EDuration="6.094381122s" podCreationTimestamp="2025-11-24 08:49:36 +0000 UTC" firstStartedPulling="2025-11-24 08:49:39.511061933 +0000 UTC m=+851.627352837" lastFinishedPulling="2025-11-24 08:49:41.63901034 +0000 UTC m=+853.755301244" observedRunningTime="2025-11-24 08:49:42.093300566 +0000 UTC m=+854.209591480" watchObservedRunningTime="2025-11-24 08:49:42.094381122 +0000 UTC m=+854.210672026" Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.675634 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cjs4x" Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.832340 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9qwq\" (UniqueName: \"kubernetes.io/projected/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-kube-api-access-k9qwq\") pod \"221d7f7c-5799-4b9b-8f7c-4fa1876eb113\" (UID: \"221d7f7c-5799-4b9b-8f7c-4fa1876eb113\") " Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.832925 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-utilities\") pod \"221d7f7c-5799-4b9b-8f7c-4fa1876eb113\" (UID: \"221d7f7c-5799-4b9b-8f7c-4fa1876eb113\") " Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.833014 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-catalog-content\") pod \"221d7f7c-5799-4b9b-8f7c-4fa1876eb113\" (UID: \"221d7f7c-5799-4b9b-8f7c-4fa1876eb113\") " Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.835419 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-utilities" (OuterVolumeSpecName: "utilities") pod "221d7f7c-5799-4b9b-8f7c-4fa1876eb113" (UID: "221d7f7c-5799-4b9b-8f7c-4fa1876eb113"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.841142 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-kube-api-access-k9qwq" (OuterVolumeSpecName: "kube-api-access-k9qwq") pod "221d7f7c-5799-4b9b-8f7c-4fa1876eb113" (UID: "221d7f7c-5799-4b9b-8f7c-4fa1876eb113"). InnerVolumeSpecName "kube-api-access-k9qwq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.889018 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-ssr4h"] Nov 24 08:49:42 crc kubenswrapper[4718]: E1124 08:49:42.889428 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="221d7f7c-5799-4b9b-8f7c-4fa1876eb113" containerName="extract-content" Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.889452 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="221d7f7c-5799-4b9b-8f7c-4fa1876eb113" containerName="extract-content" Nov 24 08:49:42 crc kubenswrapper[4718]: E1124 08:49:42.889488 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="221d7f7c-5799-4b9b-8f7c-4fa1876eb113" containerName="registry-server" Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.889497 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="221d7f7c-5799-4b9b-8f7c-4fa1876eb113" containerName="registry-server" Nov 24 08:49:42 crc kubenswrapper[4718]: E1124 08:49:42.889509 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="221d7f7c-5799-4b9b-8f7c-4fa1876eb113" containerName="extract-utilities" Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.889518 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="221d7f7c-5799-4b9b-8f7c-4fa1876eb113" containerName="extract-utilities" Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.889671 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="221d7f7c-5799-4b9b-8f7c-4fa1876eb113" containerName="registry-server" Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.892542 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-ssr4h" Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.898436 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-ssr4h"] Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.935319 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9qwq\" (UniqueName: \"kubernetes.io/projected/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-kube-api-access-k9qwq\") on node \"crc\" DevicePath \"\"" Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.935358 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 08:49:42 crc kubenswrapper[4718]: I1124 08:49:42.938239 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "221d7f7c-5799-4b9b-8f7c-4fa1876eb113" (UID: "221d7f7c-5799-4b9b-8f7c-4fa1876eb113"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.036903 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rl4k8\" (UniqueName: \"kubernetes.io/projected/435e52bb-774d-4264-a126-a362323f96d9-kube-api-access-rl4k8\") pod \"keystone-operator-index-ssr4h\" (UID: \"435e52bb-774d-4264-a126-a362323f96d9\") " pod="openstack-operators/keystone-operator-index-ssr4h" Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.037117 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/221d7f7c-5799-4b9b-8f7c-4fa1876eb113-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.082454 4718 generic.go:334] "Generic (PLEG): container finished" podID="221d7f7c-5799-4b9b-8f7c-4fa1876eb113" containerID="21697a824857c85d5d94c0e4149f9c441e0ada9f12c449287c026283a42c71a4" exitCode=0 Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.082985 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cjs4x" Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.084559 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjs4x" event={"ID":"221d7f7c-5799-4b9b-8f7c-4fa1876eb113","Type":"ContainerDied","Data":"21697a824857c85d5d94c0e4149f9c441e0ada9f12c449287c026283a42c71a4"} Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.084675 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjs4x" event={"ID":"221d7f7c-5799-4b9b-8f7c-4fa1876eb113","Type":"ContainerDied","Data":"adb6cfad314a52c210a3fb3c77c2fa7ac05dfedd0322e9fe16c029579d30a7b8"} Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.084712 4718 scope.go:117] "RemoveContainer" containerID="21697a824857c85d5d94c0e4149f9c441e0ada9f12c449287c026283a42c71a4" Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.099679 4718 scope.go:117] "RemoveContainer" containerID="f51f36488a4ce4bde0089e301c09869a204639b87e1ff604649da74bbfbb5f0f" Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.111158 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cjs4x"] Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.115038 4718 scope.go:117] "RemoveContainer" containerID="7270b4333a0621848c0ea766663cf9c0683a10711127fa6bba0712064ac5d763" Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.115775 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cjs4x"] Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.138651 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rl4k8\" (UniqueName: \"kubernetes.io/projected/435e52bb-774d-4264-a126-a362323f96d9-kube-api-access-rl4k8\") pod \"keystone-operator-index-ssr4h\" (UID: \"435e52bb-774d-4264-a126-a362323f96d9\") " pod="openstack-operators/keystone-operator-index-ssr4h" Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.144458 4718 scope.go:117] "RemoveContainer" containerID="21697a824857c85d5d94c0e4149f9c441e0ada9f12c449287c026283a42c71a4" Nov 24 08:49:43 crc kubenswrapper[4718]: E1124 08:49:43.144868 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"21697a824857c85d5d94c0e4149f9c441e0ada9f12c449287c026283a42c71a4\": container with ID starting with 21697a824857c85d5d94c0e4149f9c441e0ada9f12c449287c026283a42c71a4 not found: ID does not exist" containerID="21697a824857c85d5d94c0e4149f9c441e0ada9f12c449287c026283a42c71a4" Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.144917 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21697a824857c85d5d94c0e4149f9c441e0ada9f12c449287c026283a42c71a4"} err="failed to get container status \"21697a824857c85d5d94c0e4149f9c441e0ada9f12c449287c026283a42c71a4\": rpc error: code = NotFound desc = could not find container \"21697a824857c85d5d94c0e4149f9c441e0ada9f12c449287c026283a42c71a4\": container with ID starting with 21697a824857c85d5d94c0e4149f9c441e0ada9f12c449287c026283a42c71a4 not found: ID does not exist" Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.144942 4718 scope.go:117] "RemoveContainer" containerID="f51f36488a4ce4bde0089e301c09869a204639b87e1ff604649da74bbfbb5f0f" Nov 24 08:49:43 crc kubenswrapper[4718]: E1124 08:49:43.145287 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f51f36488a4ce4bde0089e301c09869a204639b87e1ff604649da74bbfbb5f0f\": container with ID starting with f51f36488a4ce4bde0089e301c09869a204639b87e1ff604649da74bbfbb5f0f not found: ID does not exist" containerID="f51f36488a4ce4bde0089e301c09869a204639b87e1ff604649da74bbfbb5f0f" Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.145316 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f51f36488a4ce4bde0089e301c09869a204639b87e1ff604649da74bbfbb5f0f"} err="failed to get container status \"f51f36488a4ce4bde0089e301c09869a204639b87e1ff604649da74bbfbb5f0f\": rpc error: code = NotFound desc = could not find container \"f51f36488a4ce4bde0089e301c09869a204639b87e1ff604649da74bbfbb5f0f\": container with ID starting with f51f36488a4ce4bde0089e301c09869a204639b87e1ff604649da74bbfbb5f0f not found: ID does not exist" Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.145332 4718 scope.go:117] "RemoveContainer" containerID="7270b4333a0621848c0ea766663cf9c0683a10711127fa6bba0712064ac5d763" Nov 24 08:49:43 crc kubenswrapper[4718]: E1124 08:49:43.145677 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7270b4333a0621848c0ea766663cf9c0683a10711127fa6bba0712064ac5d763\": container with ID starting with 7270b4333a0621848c0ea766663cf9c0683a10711127fa6bba0712064ac5d763 not found: ID does not exist" containerID="7270b4333a0621848c0ea766663cf9c0683a10711127fa6bba0712064ac5d763" Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.145699 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7270b4333a0621848c0ea766663cf9c0683a10711127fa6bba0712064ac5d763"} err="failed to get container status \"7270b4333a0621848c0ea766663cf9c0683a10711127fa6bba0712064ac5d763\": rpc error: code = NotFound desc = could not find container \"7270b4333a0621848c0ea766663cf9c0683a10711127fa6bba0712064ac5d763\": container with ID starting with 7270b4333a0621848c0ea766663cf9c0683a10711127fa6bba0712064ac5d763 not found: ID does not exist" Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.158813 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rl4k8\" (UniqueName: 
\"kubernetes.io/projected/435e52bb-774d-4264-a126-a362323f96d9-kube-api-access-rl4k8\") pod \"keystone-operator-index-ssr4h\" (UID: \"435e52bb-774d-4264-a126-a362323f96d9\") " pod="openstack-operators/keystone-operator-index-ssr4h" Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.206886 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-ssr4h" Nov 24 08:49:43 crc kubenswrapper[4718]: I1124 08:49:43.598072 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-ssr4h"] Nov 24 08:49:43 crc kubenswrapper[4718]: W1124 08:49:43.605290 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod435e52bb_774d_4264_a126_a362323f96d9.slice/crio-306a6883a1ca64c3d92218016d5340adaa6731df19528994dccbd48662026955 WatchSource:0}: Error finding container 306a6883a1ca64c3d92218016d5340adaa6731df19528994dccbd48662026955: Status 404 returned error can't find the container with id 306a6883a1ca64c3d92218016d5340adaa6731df19528994dccbd48662026955 Nov 24 08:49:44 crc kubenswrapper[4718]: I1124 08:49:44.090197 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-ssr4h" event={"ID":"435e52bb-774d-4264-a126-a362323f96d9","Type":"ContainerStarted","Data":"306a6883a1ca64c3d92218016d5340adaa6731df19528994dccbd48662026955"} Nov 24 08:49:44 crc kubenswrapper[4718]: I1124 08:49:44.090344 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-index-wwpx8" podUID="c8548438-fc18-4313-8c6b-06352e15cb59" containerName="registry-server" containerID="cri-o://8a918165f9dacc9bd31bfbcc6699afd7c075a73060f96f053b68f852c4148674" gracePeriod=2 Nov 24 08:49:44 crc kubenswrapper[4718]: I1124 08:49:44.598503 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-wwpx8" Nov 24 08:49:44 crc kubenswrapper[4718]: I1124 08:49:44.603469 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="221d7f7c-5799-4b9b-8f7c-4fa1876eb113" path="/var/lib/kubelet/pods/221d7f7c-5799-4b9b-8f7c-4fa1876eb113/volumes" Nov 24 08:49:44 crc kubenswrapper[4718]: I1124 08:49:44.758826 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6b5p\" (UniqueName: \"kubernetes.io/projected/c8548438-fc18-4313-8c6b-06352e15cb59-kube-api-access-b6b5p\") pod \"c8548438-fc18-4313-8c6b-06352e15cb59\" (UID: \"c8548438-fc18-4313-8c6b-06352e15cb59\") " Nov 24 08:49:44 crc kubenswrapper[4718]: I1124 08:49:44.763794 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8548438-fc18-4313-8c6b-06352e15cb59-kube-api-access-b6b5p" (OuterVolumeSpecName: "kube-api-access-b6b5p") pod "c8548438-fc18-4313-8c6b-06352e15cb59" (UID: "c8548438-fc18-4313-8c6b-06352e15cb59"). InnerVolumeSpecName "kube-api-access-b6b5p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:49:44 crc kubenswrapper[4718]: I1124 08:49:44.860880 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6b5p\" (UniqueName: \"kubernetes.io/projected/c8548438-fc18-4313-8c6b-06352e15cb59-kube-api-access-b6b5p\") on node \"crc\" DevicePath \"\"" Nov 24 08:49:45 crc kubenswrapper[4718]: I1124 08:49:45.097852 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-ssr4h" event={"ID":"435e52bb-774d-4264-a126-a362323f96d9","Type":"ContainerStarted","Data":"6d79057dfc4ce8b157f104f837554b90e198edd2f015c61e2c103aa314edd1f8"} Nov 24 08:49:45 crc kubenswrapper[4718]: I1124 08:49:45.099276 4718 generic.go:334] "Generic (PLEG): container finished" podID="c8548438-fc18-4313-8c6b-06352e15cb59" containerID="8a918165f9dacc9bd31bfbcc6699afd7c075a73060f96f053b68f852c4148674" exitCode=0 Nov 24 08:49:45 crc kubenswrapper[4718]: I1124 08:49:45.099311 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-wwpx8" Nov 24 08:49:45 crc kubenswrapper[4718]: I1124 08:49:45.099319 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-wwpx8" event={"ID":"c8548438-fc18-4313-8c6b-06352e15cb59","Type":"ContainerDied","Data":"8a918165f9dacc9bd31bfbcc6699afd7c075a73060f96f053b68f852c4148674"} Nov 24 08:49:45 crc kubenswrapper[4718]: I1124 08:49:45.099727 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-wwpx8" event={"ID":"c8548438-fc18-4313-8c6b-06352e15cb59","Type":"ContainerDied","Data":"3928603c7b8fa2255cf65eff200fd32f3f687ee0b0721c09f994b75056636d7c"} Nov 24 08:49:45 crc kubenswrapper[4718]: I1124 08:49:45.099757 4718 scope.go:117] "RemoveContainer" containerID="8a918165f9dacc9bd31bfbcc6699afd7c075a73060f96f053b68f852c4148674" Nov 24 08:49:45 crc kubenswrapper[4718]: I1124 08:49:45.117722 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-index-ssr4h" podStartSLOduration=2.53592703 podStartE2EDuration="3.117701215s" podCreationTimestamp="2025-11-24 08:49:42 +0000 UTC" firstStartedPulling="2025-11-24 08:49:43.609153882 +0000 UTC m=+855.725444786" lastFinishedPulling="2025-11-24 08:49:44.190928067 +0000 UTC m=+856.307218971" observedRunningTime="2025-11-24 08:49:45.11623052 +0000 UTC m=+857.232521424" watchObservedRunningTime="2025-11-24 08:49:45.117701215 +0000 UTC m=+857.233992119" Nov 24 08:49:45 crc kubenswrapper[4718]: I1124 08:49:45.129631 4718 scope.go:117] "RemoveContainer" containerID="8a918165f9dacc9bd31bfbcc6699afd7c075a73060f96f053b68f852c4148674" Nov 24 08:49:45 crc kubenswrapper[4718]: E1124 08:49:45.130092 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a918165f9dacc9bd31bfbcc6699afd7c075a73060f96f053b68f852c4148674\": container with ID starting with 8a918165f9dacc9bd31bfbcc6699afd7c075a73060f96f053b68f852c4148674 not found: ID does not exist" containerID="8a918165f9dacc9bd31bfbcc6699afd7c075a73060f96f053b68f852c4148674" Nov 24 08:49:45 crc kubenswrapper[4718]: I1124 08:49:45.130150 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a918165f9dacc9bd31bfbcc6699afd7c075a73060f96f053b68f852c4148674"} err="failed to get container status 
\"8a918165f9dacc9bd31bfbcc6699afd7c075a73060f96f053b68f852c4148674\": rpc error: code = NotFound desc = could not find container \"8a918165f9dacc9bd31bfbcc6699afd7c075a73060f96f053b68f852c4148674\": container with ID starting with 8a918165f9dacc9bd31bfbcc6699afd7c075a73060f96f053b68f852c4148674 not found: ID does not exist" Nov 24 08:49:45 crc kubenswrapper[4718]: I1124 08:49:45.136584 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-wwpx8"] Nov 24 08:49:45 crc kubenswrapper[4718]: I1124 08:49:45.140754 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-index-wwpx8"] Nov 24 08:49:46 crc kubenswrapper[4718]: I1124 08:49:46.604888 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8548438-fc18-4313-8c6b-06352e15cb59" path="/var/lib/kubelet/pods/c8548438-fc18-4313-8c6b-06352e15cb59/volumes" Nov 24 08:49:53 crc kubenswrapper[4718]: I1124 08:49:53.207913 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-index-ssr4h" Nov 24 08:49:53 crc kubenswrapper[4718]: I1124 08:49:53.208403 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/keystone-operator-index-ssr4h" Nov 24 08:49:53 crc kubenswrapper[4718]: I1124 08:49:53.231025 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/keystone-operator-index-ssr4h" Nov 24 08:49:54 crc kubenswrapper[4718]: I1124 08:49:54.177300 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-index-ssr4h" Nov 24 08:50:08 crc kubenswrapper[4718]: I1124 08:50:08.948650 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82"] Nov 24 08:50:08 crc kubenswrapper[4718]: E1124 08:50:08.949427 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8548438-fc18-4313-8c6b-06352e15cb59" containerName="registry-server" Nov 24 08:50:08 crc kubenswrapper[4718]: I1124 08:50:08.949439 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8548438-fc18-4313-8c6b-06352e15cb59" containerName="registry-server" Nov 24 08:50:08 crc kubenswrapper[4718]: I1124 08:50:08.949562 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8548438-fc18-4313-8c6b-06352e15cb59" containerName="registry-server" Nov 24 08:50:08 crc kubenswrapper[4718]: I1124 08:50:08.951994 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" Nov 24 08:50:08 crc kubenswrapper[4718]: I1124 08:50:08.954807 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-fq6vw" Nov 24 08:50:08 crc kubenswrapper[4718]: I1124 08:50:08.970615 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82"] Nov 24 08:50:09 crc kubenswrapper[4718]: I1124 08:50:09.016501 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e641cf59-bf41-4b81-a0ac-75e8a61baf37-bundle\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82\" (UID: \"e641cf59-bf41-4b81-a0ac-75e8a61baf37\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" Nov 24 08:50:09 crc kubenswrapper[4718]: I1124 08:50:09.016569 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzlsl\" (UniqueName: \"kubernetes.io/projected/e641cf59-bf41-4b81-a0ac-75e8a61baf37-kube-api-access-xzlsl\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82\" (UID: \"e641cf59-bf41-4b81-a0ac-75e8a61baf37\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" Nov 24 08:50:09 crc kubenswrapper[4718]: I1124 08:50:09.016602 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e641cf59-bf41-4b81-a0ac-75e8a61baf37-util\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82\" (UID: \"e641cf59-bf41-4b81-a0ac-75e8a61baf37\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" Nov 24 08:50:09 crc kubenswrapper[4718]: I1124 08:50:09.117723 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e641cf59-bf41-4b81-a0ac-75e8a61baf37-util\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82\" (UID: \"e641cf59-bf41-4b81-a0ac-75e8a61baf37\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" Nov 24 08:50:09 crc kubenswrapper[4718]: I1124 08:50:09.117805 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e641cf59-bf41-4b81-a0ac-75e8a61baf37-bundle\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82\" (UID: \"e641cf59-bf41-4b81-a0ac-75e8a61baf37\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" Nov 24 08:50:09 crc kubenswrapper[4718]: I1124 08:50:09.117863 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzlsl\" (UniqueName: \"kubernetes.io/projected/e641cf59-bf41-4b81-a0ac-75e8a61baf37-kube-api-access-xzlsl\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82\" (UID: \"e641cf59-bf41-4b81-a0ac-75e8a61baf37\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" Nov 24 08:50:09 crc kubenswrapper[4718]: I1124 08:50:09.118291 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/e641cf59-bf41-4b81-a0ac-75e8a61baf37-util\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82\" (UID: \"e641cf59-bf41-4b81-a0ac-75e8a61baf37\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" Nov 24 08:50:09 crc kubenswrapper[4718]: I1124 08:50:09.118325 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e641cf59-bf41-4b81-a0ac-75e8a61baf37-bundle\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82\" (UID: \"e641cf59-bf41-4b81-a0ac-75e8a61baf37\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" Nov 24 08:50:09 crc kubenswrapper[4718]: I1124 08:50:09.136379 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzlsl\" (UniqueName: \"kubernetes.io/projected/e641cf59-bf41-4b81-a0ac-75e8a61baf37-kube-api-access-xzlsl\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82\" (UID: \"e641cf59-bf41-4b81-a0ac-75e8a61baf37\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" Nov 24 08:50:09 crc kubenswrapper[4718]: I1124 08:50:09.273691 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" Nov 24 08:50:09 crc kubenswrapper[4718]: I1124 08:50:09.528457 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82"] Nov 24 08:50:10 crc kubenswrapper[4718]: I1124 08:50:10.260408 4718 generic.go:334] "Generic (PLEG): container finished" podID="e641cf59-bf41-4b81-a0ac-75e8a61baf37" containerID="87b21fc77594d723a090490dccb56a59570c9f50f29226d02167fd901427f832" exitCode=0 Nov 24 08:50:10 crc kubenswrapper[4718]: I1124 08:50:10.260455 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" event={"ID":"e641cf59-bf41-4b81-a0ac-75e8a61baf37","Type":"ContainerDied","Data":"87b21fc77594d723a090490dccb56a59570c9f50f29226d02167fd901427f832"} Nov 24 08:50:10 crc kubenswrapper[4718]: I1124 08:50:10.260672 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" event={"ID":"e641cf59-bf41-4b81-a0ac-75e8a61baf37","Type":"ContainerStarted","Data":"7adaeb1b02461d7f143e1e8a2ab51e881ab1be79f3e00e3bdc3e86be92f35528"} Nov 24 08:50:11 crc kubenswrapper[4718]: I1124 08:50:11.269514 4718 generic.go:334] "Generic (PLEG): container finished" podID="e641cf59-bf41-4b81-a0ac-75e8a61baf37" containerID="05aff48db0ea11f2d31c5cbbd5724314cc4fd54d9327ad7d05c0d5a46206e657" exitCode=0 Nov 24 08:50:11 crc kubenswrapper[4718]: I1124 08:50:11.269589 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" event={"ID":"e641cf59-bf41-4b81-a0ac-75e8a61baf37","Type":"ContainerDied","Data":"05aff48db0ea11f2d31c5cbbd5724314cc4fd54d9327ad7d05c0d5a46206e657"} Nov 24 08:50:12 crc kubenswrapper[4718]: I1124 08:50:12.279220 4718 generic.go:334] "Generic (PLEG): container finished" podID="e641cf59-bf41-4b81-a0ac-75e8a61baf37" containerID="7af93b5410b44f7fa7f3abc22b052d434f92e5fa867bd3ae28294a82022a0184" exitCode=0 Nov 24 08:50:12 crc kubenswrapper[4718]: I1124 08:50:12.279309 4718 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" event={"ID":"e641cf59-bf41-4b81-a0ac-75e8a61baf37","Type":"ContainerDied","Data":"7af93b5410b44f7fa7f3abc22b052d434f92e5fa867bd3ae28294a82022a0184"} Nov 24 08:50:12 crc kubenswrapper[4718]: I1124 08:50:12.281842 4718 generic.go:334] "Generic (PLEG): container finished" podID="71536c12-d65b-479d-b35f-43579c0c2e91" containerID="a996db3a826d10dc6703238571a49c509243a9efed88b40570e24dc0b22bcd10" exitCode=0 Nov 24 08:50:12 crc kubenswrapper[4718]: I1124 08:50:12.281887 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/rabbitmq-server-0" event={"ID":"71536c12-d65b-479d-b35f-43579c0c2e91","Type":"ContainerDied","Data":"a996db3a826d10dc6703238571a49c509243a9efed88b40570e24dc0b22bcd10"} Nov 24 08:50:13 crc kubenswrapper[4718]: I1124 08:50:13.292667 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/rabbitmq-server-0" event={"ID":"71536c12-d65b-479d-b35f-43579c0c2e91","Type":"ContainerStarted","Data":"12685b4350235a78acbfc191ff9b5971862d948ce3bb1eee1b81aa0b65b9cf85"} Nov 24 08:50:13 crc kubenswrapper[4718]: I1124 08:50:13.294024 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:50:13 crc kubenswrapper[4718]: I1124 08:50:13.321324 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/rabbitmq-server-0" podStartSLOduration=35.72289144 podStartE2EDuration="41.321308334s" podCreationTimestamp="2025-11-24 08:49:32 +0000 UTC" firstStartedPulling="2025-11-24 08:49:33.988263807 +0000 UTC m=+846.104554711" lastFinishedPulling="2025-11-24 08:49:39.586680701 +0000 UTC m=+851.702971605" observedRunningTime="2025-11-24 08:50:13.3165241 +0000 UTC m=+885.432815014" watchObservedRunningTime="2025-11-24 08:50:13.321308334 +0000 UTC m=+885.437599238" Nov 24 08:50:13 crc kubenswrapper[4718]: I1124 08:50:13.530286 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" Nov 24 08:50:13 crc kubenswrapper[4718]: I1124 08:50:13.684258 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e641cf59-bf41-4b81-a0ac-75e8a61baf37-util\") pod \"e641cf59-bf41-4b81-a0ac-75e8a61baf37\" (UID: \"e641cf59-bf41-4b81-a0ac-75e8a61baf37\") " Nov 24 08:50:13 crc kubenswrapper[4718]: I1124 08:50:13.684614 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzlsl\" (UniqueName: \"kubernetes.io/projected/e641cf59-bf41-4b81-a0ac-75e8a61baf37-kube-api-access-xzlsl\") pod \"e641cf59-bf41-4b81-a0ac-75e8a61baf37\" (UID: \"e641cf59-bf41-4b81-a0ac-75e8a61baf37\") " Nov 24 08:50:13 crc kubenswrapper[4718]: I1124 08:50:13.685367 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e641cf59-bf41-4b81-a0ac-75e8a61baf37-bundle\") pod \"e641cf59-bf41-4b81-a0ac-75e8a61baf37\" (UID: \"e641cf59-bf41-4b81-a0ac-75e8a61baf37\") " Nov 24 08:50:13 crc kubenswrapper[4718]: I1124 08:50:13.685924 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e641cf59-bf41-4b81-a0ac-75e8a61baf37-bundle" (OuterVolumeSpecName: "bundle") pod "e641cf59-bf41-4b81-a0ac-75e8a61baf37" (UID: "e641cf59-bf41-4b81-a0ac-75e8a61baf37"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:50:13 crc kubenswrapper[4718]: I1124 08:50:13.689665 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e641cf59-bf41-4b81-a0ac-75e8a61baf37-kube-api-access-xzlsl" (OuterVolumeSpecName: "kube-api-access-xzlsl") pod "e641cf59-bf41-4b81-a0ac-75e8a61baf37" (UID: "e641cf59-bf41-4b81-a0ac-75e8a61baf37"). InnerVolumeSpecName "kube-api-access-xzlsl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:50:13 crc kubenswrapper[4718]: I1124 08:50:13.698912 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e641cf59-bf41-4b81-a0ac-75e8a61baf37-util" (OuterVolumeSpecName: "util") pod "e641cf59-bf41-4b81-a0ac-75e8a61baf37" (UID: "e641cf59-bf41-4b81-a0ac-75e8a61baf37"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:50:13 crc kubenswrapper[4718]: I1124 08:50:13.787354 4718 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e641cf59-bf41-4b81-a0ac-75e8a61baf37-util\") on node \"crc\" DevicePath \"\"" Nov 24 08:50:13 crc kubenswrapper[4718]: I1124 08:50:13.787398 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzlsl\" (UniqueName: \"kubernetes.io/projected/e641cf59-bf41-4b81-a0ac-75e8a61baf37-kube-api-access-xzlsl\") on node \"crc\" DevicePath \"\"" Nov 24 08:50:13 crc kubenswrapper[4718]: I1124 08:50:13.787410 4718 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e641cf59-bf41-4b81-a0ac-75e8a61baf37-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:50:14 crc kubenswrapper[4718]: I1124 08:50:14.300760 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" Nov 24 08:50:14 crc kubenswrapper[4718]: I1124 08:50:14.310511 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82" event={"ID":"e641cf59-bf41-4b81-a0ac-75e8a61baf37","Type":"ContainerDied","Data":"7adaeb1b02461d7f143e1e8a2ab51e881ab1be79f3e00e3bdc3e86be92f35528"} Nov 24 08:50:14 crc kubenswrapper[4718]: I1124 08:50:14.310543 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7adaeb1b02461d7f143e1e8a2ab51e881ab1be79f3e00e3bdc3e86be92f35528" Nov 24 08:50:23 crc kubenswrapper[4718]: I1124 08:50:23.566027 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/rabbitmq-server-0" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.007181 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd"] Nov 24 08:50:25 crc kubenswrapper[4718]: E1124 08:50:25.008174 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e641cf59-bf41-4b81-a0ac-75e8a61baf37" containerName="extract" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.008194 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="e641cf59-bf41-4b81-a0ac-75e8a61baf37" containerName="extract" Nov 24 08:50:25 crc kubenswrapper[4718]: E1124 08:50:25.008213 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e641cf59-bf41-4b81-a0ac-75e8a61baf37" containerName="util" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.008220 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="e641cf59-bf41-4b81-a0ac-75e8a61baf37" containerName="util" Nov 24 08:50:25 crc kubenswrapper[4718]: E1124 08:50:25.008226 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e641cf59-bf41-4b81-a0ac-75e8a61baf37" containerName="pull" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.008233 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="e641cf59-bf41-4b81-a0ac-75e8a61baf37" containerName="pull" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.008400 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="e641cf59-bf41-4b81-a0ac-75e8a61baf37" containerName="extract" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.009397 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.012709 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-96sdt" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.013018 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-service-cert" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.020477 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd"] Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.141281 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/50aceec1-fd1a-4728-abad-a3ed31345a27-webhook-cert\") pod \"keystone-operator-controller-manager-dfbd56c94-wc9zd\" (UID: \"50aceec1-fd1a-4728-abad-a3ed31345a27\") " pod="openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.141382 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/50aceec1-fd1a-4728-abad-a3ed31345a27-apiservice-cert\") pod \"keystone-operator-controller-manager-dfbd56c94-wc9zd\" (UID: \"50aceec1-fd1a-4728-abad-a3ed31345a27\") " pod="openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.141408 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwmcr\" (UniqueName: \"kubernetes.io/projected/50aceec1-fd1a-4728-abad-a3ed31345a27-kube-api-access-pwmcr\") pod \"keystone-operator-controller-manager-dfbd56c94-wc9zd\" (UID: \"50aceec1-fd1a-4728-abad-a3ed31345a27\") " pod="openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.242900 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/50aceec1-fd1a-4728-abad-a3ed31345a27-apiservice-cert\") pod \"keystone-operator-controller-manager-dfbd56c94-wc9zd\" (UID: \"50aceec1-fd1a-4728-abad-a3ed31345a27\") " pod="openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.242943 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwmcr\" (UniqueName: \"kubernetes.io/projected/50aceec1-fd1a-4728-abad-a3ed31345a27-kube-api-access-pwmcr\") pod \"keystone-operator-controller-manager-dfbd56c94-wc9zd\" (UID: \"50aceec1-fd1a-4728-abad-a3ed31345a27\") " pod="openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.243046 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/50aceec1-fd1a-4728-abad-a3ed31345a27-webhook-cert\") pod \"keystone-operator-controller-manager-dfbd56c94-wc9zd\" (UID: \"50aceec1-fd1a-4728-abad-a3ed31345a27\") " pod="openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.256894 4718 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/50aceec1-fd1a-4728-abad-a3ed31345a27-webhook-cert\") pod \"keystone-operator-controller-manager-dfbd56c94-wc9zd\" (UID: \"50aceec1-fd1a-4728-abad-a3ed31345a27\") " pod="openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.256943 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/50aceec1-fd1a-4728-abad-a3ed31345a27-apiservice-cert\") pod \"keystone-operator-controller-manager-dfbd56c94-wc9zd\" (UID: \"50aceec1-fd1a-4728-abad-a3ed31345a27\") " pod="openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.259015 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwmcr\" (UniqueName: \"kubernetes.io/projected/50aceec1-fd1a-4728-abad-a3ed31345a27-kube-api-access-pwmcr\") pod \"keystone-operator-controller-manager-dfbd56c94-wc9zd\" (UID: \"50aceec1-fd1a-4728-abad-a3ed31345a27\") " pod="openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.328875 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd" Nov 24 08:50:25 crc kubenswrapper[4718]: I1124 08:50:25.727902 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd"] Nov 24 08:50:26 crc kubenswrapper[4718]: I1124 08:50:26.368284 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd" event={"ID":"50aceec1-fd1a-4728-abad-a3ed31345a27","Type":"ContainerStarted","Data":"44a142b536e9383bc83ba956d8593bb367675c92c69ba9211c915ea152a554d4"} Nov 24 08:50:28 crc kubenswrapper[4718]: I1124 08:50:28.382585 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd" event={"ID":"50aceec1-fd1a-4728-abad-a3ed31345a27","Type":"ContainerStarted","Data":"d13c915fb829b564c970075684e87ae05b99d6fbae1e7ce4495fa85f321bcf90"} Nov 24 08:50:28 crc kubenswrapper[4718]: I1124 08:50:28.383168 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd" event={"ID":"50aceec1-fd1a-4728-abad-a3ed31345a27","Type":"ContainerStarted","Data":"4946cc0764cf999f0edfaa59b7e543c98fe139dd0ef37c529f9751454d38d3e4"} Nov 24 08:50:28 crc kubenswrapper[4718]: I1124 08:50:28.384126 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd" Nov 24 08:50:28 crc kubenswrapper[4718]: I1124 08:50:28.405180 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd" podStartSLOduration=2.292374041 podStartE2EDuration="4.405159105s" podCreationTimestamp="2025-11-24 08:50:24 +0000 UTC" firstStartedPulling="2025-11-24 08:50:25.73582786 +0000 UTC m=+897.852118764" lastFinishedPulling="2025-11-24 08:50:27.848612924 +0000 UTC m=+899.964903828" observedRunningTime="2025-11-24 08:50:28.401231561 +0000 UTC m=+900.517522475" watchObservedRunningTime="2025-11-24 
08:50:28.405159105 +0000 UTC m=+900.521450009" Nov 24 08:50:35 crc kubenswrapper[4718]: I1124 08:50:35.333159 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-dfbd56c94-wc9zd" Nov 24 08:50:42 crc kubenswrapper[4718]: I1124 08:50:42.895389 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-index-mqctp"] Nov 24 08:50:42 crc kubenswrapper[4718]: I1124 08:50:42.896947 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-index-mqctp" Nov 24 08:50:42 crc kubenswrapper[4718]: I1124 08:50:42.899577 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-index-dockercfg-9mqg5" Nov 24 08:50:42 crc kubenswrapper[4718]: I1124 08:50:42.904376 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-mqctp"] Nov 24 08:50:42 crc kubenswrapper[4718]: I1124 08:50:42.975230 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8f2g\" (UniqueName: \"kubernetes.io/projected/dbc3b982-3c01-4ad2-bb17-3be63e5fd3d3-kube-api-access-w8f2g\") pod \"horizon-operator-index-mqctp\" (UID: \"dbc3b982-3c01-4ad2-bb17-3be63e5fd3d3\") " pod="openstack-operators/horizon-operator-index-mqctp" Nov 24 08:50:43 crc kubenswrapper[4718]: I1124 08:50:43.076508 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8f2g\" (UniqueName: \"kubernetes.io/projected/dbc3b982-3c01-4ad2-bb17-3be63e5fd3d3-kube-api-access-w8f2g\") pod \"horizon-operator-index-mqctp\" (UID: \"dbc3b982-3c01-4ad2-bb17-3be63e5fd3d3\") " pod="openstack-operators/horizon-operator-index-mqctp" Nov 24 08:50:43 crc kubenswrapper[4718]: I1124 08:50:43.096058 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8f2g\" (UniqueName: \"kubernetes.io/projected/dbc3b982-3c01-4ad2-bb17-3be63e5fd3d3-kube-api-access-w8f2g\") pod \"horizon-operator-index-mqctp\" (UID: \"dbc3b982-3c01-4ad2-bb17-3be63e5fd3d3\") " pod="openstack-operators/horizon-operator-index-mqctp" Nov 24 08:50:43 crc kubenswrapper[4718]: I1124 08:50:43.214934 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-index-mqctp" Nov 24 08:50:43 crc kubenswrapper[4718]: I1124 08:50:43.533523 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-mqctp"] Nov 24 08:50:43 crc kubenswrapper[4718]: W1124 08:50:43.546677 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddbc3b982_3c01_4ad2_bb17_3be63e5fd3d3.slice/crio-d4858c66ea91e7be4008c30483a3862f31f8a3e89aaeaa64279c4dfe35065ec2 WatchSource:0}: Error finding container d4858c66ea91e7be4008c30483a3862f31f8a3e89aaeaa64279c4dfe35065ec2: Status 404 returned error can't find the container with id d4858c66ea91e7be4008c30483a3862f31f8a3e89aaeaa64279c4dfe35065ec2 Nov 24 08:50:43 crc kubenswrapper[4718]: I1124 08:50:43.727381 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-mqctp" event={"ID":"dbc3b982-3c01-4ad2-bb17-3be63e5fd3d3","Type":"ContainerStarted","Data":"d4858c66ea91e7be4008c30483a3862f31f8a3e89aaeaa64279c4dfe35065ec2"} Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.081102 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/keystone-f088-account-create-update-h2pdn"] Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.082481 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-f088-account-create-update-h2pdn" Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.085014 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-db-secret" Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.093781 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-f088-account-create-update-h2pdn"] Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.175815 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/keystone-db-create-2rvdl"] Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.176880 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-db-create-2rvdl" Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.184239 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-db-create-2rvdl"] Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.211998 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkvsx\" (UniqueName: \"kubernetes.io/projected/db5b4c90-8a71-4000-9666-93c6d8bd99db-kube-api-access-mkvsx\") pod \"keystone-f088-account-create-update-h2pdn\" (UID: \"db5b4c90-8a71-4000-9666-93c6d8bd99db\") " pod="glance-kuttl-tests/keystone-f088-account-create-update-h2pdn" Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.212075 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db5b4c90-8a71-4000-9666-93c6d8bd99db-operator-scripts\") pod \"keystone-f088-account-create-update-h2pdn\" (UID: \"db5b4c90-8a71-4000-9666-93c6d8bd99db\") " pod="glance-kuttl-tests/keystone-f088-account-create-update-h2pdn" Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.313474 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c8913b0-43ac-4afc-896f-8c82f07d2c56-operator-scripts\") pod \"keystone-db-create-2rvdl\" (UID: \"2c8913b0-43ac-4afc-896f-8c82f07d2c56\") " pod="glance-kuttl-tests/keystone-db-create-2rvdl" Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.313580 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db5b4c90-8a71-4000-9666-93c6d8bd99db-operator-scripts\") pod \"keystone-f088-account-create-update-h2pdn\" (UID: \"db5b4c90-8a71-4000-9666-93c6d8bd99db\") " pod="glance-kuttl-tests/keystone-f088-account-create-update-h2pdn" Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.313887 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wqxw\" (UniqueName: \"kubernetes.io/projected/2c8913b0-43ac-4afc-896f-8c82f07d2c56-kube-api-access-6wqxw\") pod \"keystone-db-create-2rvdl\" (UID: \"2c8913b0-43ac-4afc-896f-8c82f07d2c56\") " pod="glance-kuttl-tests/keystone-db-create-2rvdl" Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.313923 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkvsx\" (UniqueName: \"kubernetes.io/projected/db5b4c90-8a71-4000-9666-93c6d8bd99db-kube-api-access-mkvsx\") pod \"keystone-f088-account-create-update-h2pdn\" (UID: \"db5b4c90-8a71-4000-9666-93c6d8bd99db\") " pod="glance-kuttl-tests/keystone-f088-account-create-update-h2pdn" Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.314527 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db5b4c90-8a71-4000-9666-93c6d8bd99db-operator-scripts\") pod \"keystone-f088-account-create-update-h2pdn\" (UID: \"db5b4c90-8a71-4000-9666-93c6d8bd99db\") " pod="glance-kuttl-tests/keystone-f088-account-create-update-h2pdn" Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.332283 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkvsx\" (UniqueName: \"kubernetes.io/projected/db5b4c90-8a71-4000-9666-93c6d8bd99db-kube-api-access-mkvsx\") pod 
\"keystone-f088-account-create-update-h2pdn\" (UID: \"db5b4c90-8a71-4000-9666-93c6d8bd99db\") " pod="glance-kuttl-tests/keystone-f088-account-create-update-h2pdn" Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.401244 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-f088-account-create-update-h2pdn" Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.414873 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wqxw\" (UniqueName: \"kubernetes.io/projected/2c8913b0-43ac-4afc-896f-8c82f07d2c56-kube-api-access-6wqxw\") pod \"keystone-db-create-2rvdl\" (UID: \"2c8913b0-43ac-4afc-896f-8c82f07d2c56\") " pod="glance-kuttl-tests/keystone-db-create-2rvdl" Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.414981 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c8913b0-43ac-4afc-896f-8c82f07d2c56-operator-scripts\") pod \"keystone-db-create-2rvdl\" (UID: \"2c8913b0-43ac-4afc-896f-8c82f07d2c56\") " pod="glance-kuttl-tests/keystone-db-create-2rvdl" Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.415717 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c8913b0-43ac-4afc-896f-8c82f07d2c56-operator-scripts\") pod \"keystone-db-create-2rvdl\" (UID: \"2c8913b0-43ac-4afc-896f-8c82f07d2c56\") " pod="glance-kuttl-tests/keystone-db-create-2rvdl" Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.432023 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wqxw\" (UniqueName: \"kubernetes.io/projected/2c8913b0-43ac-4afc-896f-8c82f07d2c56-kube-api-access-6wqxw\") pod \"keystone-db-create-2rvdl\" (UID: \"2c8913b0-43ac-4afc-896f-8c82f07d2c56\") " pod="glance-kuttl-tests/keystone-db-create-2rvdl" Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.491674 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-db-create-2rvdl" Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.789713 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-f088-account-create-update-h2pdn"] Nov 24 08:50:45 crc kubenswrapper[4718]: W1124 08:50:45.807876 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb5b4c90_8a71_4000_9666_93c6d8bd99db.slice/crio-a2acf550d917d6619e84bc5883ac688bf664fbfbee15193988da7bd4f573dd48 WatchSource:0}: Error finding container a2acf550d917d6619e84bc5883ac688bf664fbfbee15193988da7bd4f573dd48: Status 404 returned error can't find the container with id a2acf550d917d6619e84bc5883ac688bf664fbfbee15193988da7bd4f573dd48 Nov 24 08:50:45 crc kubenswrapper[4718]: I1124 08:50:45.885073 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-db-create-2rvdl"] Nov 24 08:50:45 crc kubenswrapper[4718]: W1124 08:50:45.892322 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c8913b0_43ac_4afc_896f_8c82f07d2c56.slice/crio-bb30d0374d9d447720229206d77a6e1616754ae975f0cc615c43128a36772776 WatchSource:0}: Error finding container bb30d0374d9d447720229206d77a6e1616754ae975f0cc615c43128a36772776: Status 404 returned error can't find the container with id bb30d0374d9d447720229206d77a6e1616754ae975f0cc615c43128a36772776 Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.089847 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-index-76zdn"] Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.090772 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-index-76zdn" Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.092587 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-index-dockercfg-nt9tf" Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.098917 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-76zdn"] Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.225798 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2hb7\" (UniqueName: \"kubernetes.io/projected/675c1f95-97f0-4a23-9539-e2524c8211f6-kube-api-access-t2hb7\") pod \"swift-operator-index-76zdn\" (UID: \"675c1f95-97f0-4a23-9539-e2524c8211f6\") " pod="openstack-operators/swift-operator-index-76zdn" Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.327590 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2hb7\" (UniqueName: \"kubernetes.io/projected/675c1f95-97f0-4a23-9539-e2524c8211f6-kube-api-access-t2hb7\") pod \"swift-operator-index-76zdn\" (UID: \"675c1f95-97f0-4a23-9539-e2524c8211f6\") " pod="openstack-operators/swift-operator-index-76zdn" Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.347641 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2hb7\" (UniqueName: \"kubernetes.io/projected/675c1f95-97f0-4a23-9539-e2524c8211f6-kube-api-access-t2hb7\") pod \"swift-operator-index-76zdn\" (UID: \"675c1f95-97f0-4a23-9539-e2524c8211f6\") " pod="openstack-operators/swift-operator-index-76zdn" Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.442727 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-index-76zdn" Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.642025 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-76zdn"] Nov 24 08:50:46 crc kubenswrapper[4718]: W1124 08:50:46.648316 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod675c1f95_97f0_4a23_9539_e2524c8211f6.slice/crio-6cab2034430669a02224f1dd2a655939df78d8cc2d9bfa748a21c30c942a4a8f WatchSource:0}: Error finding container 6cab2034430669a02224f1dd2a655939df78d8cc2d9bfa748a21c30c942a4a8f: Status 404 returned error can't find the container with id 6cab2034430669a02224f1dd2a655939df78d8cc2d9bfa748a21c30c942a4a8f Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.650361 4718 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.746287 4718 generic.go:334] "Generic (PLEG): container finished" podID="2c8913b0-43ac-4afc-896f-8c82f07d2c56" containerID="6b0badc2ba3ca3d440201b4d63b16e615de176deb1853928db4f2ada532867e7" exitCode=0 Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.746344 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-create-2rvdl" event={"ID":"2c8913b0-43ac-4afc-896f-8c82f07d2c56","Type":"ContainerDied","Data":"6b0badc2ba3ca3d440201b4d63b16e615de176deb1853928db4f2ada532867e7"} Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.746657 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-create-2rvdl" event={"ID":"2c8913b0-43ac-4afc-896f-8c82f07d2c56","Type":"ContainerStarted","Data":"bb30d0374d9d447720229206d77a6e1616754ae975f0cc615c43128a36772776"} Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.748771 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-mqctp" event={"ID":"dbc3b982-3c01-4ad2-bb17-3be63e5fd3d3","Type":"ContainerStarted","Data":"686d090b1a8a6fc790310c5952f66ec8c340294d0ac5827ebdf4c19bfcf019b1"} Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.750046 4718 generic.go:334] "Generic (PLEG): container finished" podID="db5b4c90-8a71-4000-9666-93c6d8bd99db" containerID="ec56abd82bb090b0d5bb352fdf038920ba24a10e6d5644da9239092c43262b2b" exitCode=0 Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.750099 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-f088-account-create-update-h2pdn" event={"ID":"db5b4c90-8a71-4000-9666-93c6d8bd99db","Type":"ContainerDied","Data":"ec56abd82bb090b0d5bb352fdf038920ba24a10e6d5644da9239092c43262b2b"} Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.750116 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-f088-account-create-update-h2pdn" event={"ID":"db5b4c90-8a71-4000-9666-93c6d8bd99db","Type":"ContainerStarted","Data":"a2acf550d917d6619e84bc5883ac688bf664fbfbee15193988da7bd4f573dd48"} Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.750799 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-76zdn" event={"ID":"675c1f95-97f0-4a23-9539-e2524c8211f6","Type":"ContainerStarted","Data":"6cab2034430669a02224f1dd2a655939df78d8cc2d9bfa748a21c30c942a4a8f"} Nov 24 08:50:46 crc kubenswrapper[4718]: I1124 08:50:46.774631 4718 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack-operators/horizon-operator-index-mqctp" podStartSLOduration=2.5110578869999998 podStartE2EDuration="4.774028423s" podCreationTimestamp="2025-11-24 08:50:42 +0000 UTC" firstStartedPulling="2025-11-24 08:50:43.548687669 +0000 UTC m=+915.664978573" lastFinishedPulling="2025-11-24 08:50:45.811658205 +0000 UTC m=+917.927949109" observedRunningTime="2025-11-24 08:50:46.772928677 +0000 UTC m=+918.889219591" watchObservedRunningTime="2025-11-24 08:50:46.774028423 +0000 UTC m=+918.890319327" Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.053530 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-db-create-2rvdl" Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.062495 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-f088-account-create-update-h2pdn" Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.168581 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wqxw\" (UniqueName: \"kubernetes.io/projected/2c8913b0-43ac-4afc-896f-8c82f07d2c56-kube-api-access-6wqxw\") pod \"2c8913b0-43ac-4afc-896f-8c82f07d2c56\" (UID: \"2c8913b0-43ac-4afc-896f-8c82f07d2c56\") " Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.168647 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkvsx\" (UniqueName: \"kubernetes.io/projected/db5b4c90-8a71-4000-9666-93c6d8bd99db-kube-api-access-mkvsx\") pod \"db5b4c90-8a71-4000-9666-93c6d8bd99db\" (UID: \"db5b4c90-8a71-4000-9666-93c6d8bd99db\") " Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.168708 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db5b4c90-8a71-4000-9666-93c6d8bd99db-operator-scripts\") pod \"db5b4c90-8a71-4000-9666-93c6d8bd99db\" (UID: \"db5b4c90-8a71-4000-9666-93c6d8bd99db\") " Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.168771 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c8913b0-43ac-4afc-896f-8c82f07d2c56-operator-scripts\") pod \"2c8913b0-43ac-4afc-896f-8c82f07d2c56\" (UID: \"2c8913b0-43ac-4afc-896f-8c82f07d2c56\") " Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.169867 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c8913b0-43ac-4afc-896f-8c82f07d2c56-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2c8913b0-43ac-4afc-896f-8c82f07d2c56" (UID: "2c8913b0-43ac-4afc-896f-8c82f07d2c56"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.169877 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db5b4c90-8a71-4000-9666-93c6d8bd99db-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "db5b4c90-8a71-4000-9666-93c6d8bd99db" (UID: "db5b4c90-8a71-4000-9666-93c6d8bd99db"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.174048 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c8913b0-43ac-4afc-896f-8c82f07d2c56-kube-api-access-6wqxw" (OuterVolumeSpecName: "kube-api-access-6wqxw") pod "2c8913b0-43ac-4afc-896f-8c82f07d2c56" (UID: "2c8913b0-43ac-4afc-896f-8c82f07d2c56"). InnerVolumeSpecName "kube-api-access-6wqxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.174094 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db5b4c90-8a71-4000-9666-93c6d8bd99db-kube-api-access-mkvsx" (OuterVolumeSpecName: "kube-api-access-mkvsx") pod "db5b4c90-8a71-4000-9666-93c6d8bd99db" (UID: "db5b4c90-8a71-4000-9666-93c6d8bd99db"). InnerVolumeSpecName "kube-api-access-mkvsx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.270398 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wqxw\" (UniqueName: \"kubernetes.io/projected/2c8913b0-43ac-4afc-896f-8c82f07d2c56-kube-api-access-6wqxw\") on node \"crc\" DevicePath \"\"" Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.270431 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkvsx\" (UniqueName: \"kubernetes.io/projected/db5b4c90-8a71-4000-9666-93c6d8bd99db-kube-api-access-mkvsx\") on node \"crc\" DevicePath \"\"" Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.270445 4718 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db5b4c90-8a71-4000-9666-93c6d8bd99db-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.270454 4718 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c8913b0-43ac-4afc-896f-8c82f07d2c56-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.763427 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-f088-account-create-update-h2pdn" event={"ID":"db5b4c90-8a71-4000-9666-93c6d8bd99db","Type":"ContainerDied","Data":"a2acf550d917d6619e84bc5883ac688bf664fbfbee15193988da7bd4f573dd48"} Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.763760 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2acf550d917d6619e84bc5883ac688bf664fbfbee15193988da7bd4f573dd48" Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.763455 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-f088-account-create-update-h2pdn" Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.764724 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-create-2rvdl" event={"ID":"2c8913b0-43ac-4afc-896f-8c82f07d2c56","Type":"ContainerDied","Data":"bb30d0374d9d447720229206d77a6e1616754ae975f0cc615c43128a36772776"} Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.764761 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb30d0374d9d447720229206d77a6e1616754ae975f0cc615c43128a36772776" Nov 24 08:50:48 crc kubenswrapper[4718]: I1124 08:50:48.764763 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-db-create-2rvdl" Nov 24 08:50:49 crc kubenswrapper[4718]: I1124 08:50:49.772319 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-76zdn" event={"ID":"675c1f95-97f0-4a23-9539-e2524c8211f6","Type":"ContainerStarted","Data":"357f4cd4e1e6b7cc313eed872d5a6890d4ff018f3195cff67905a860eb4b2abc"} Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.664490 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-index-76zdn" podStartSLOduration=2.270792104 podStartE2EDuration="4.664471217s" podCreationTimestamp="2025-11-24 08:50:46 +0000 UTC" firstStartedPulling="2025-11-24 08:50:46.649954066 +0000 UTC m=+918.766244990" lastFinishedPulling="2025-11-24 08:50:49.043633199 +0000 UTC m=+921.159924103" observedRunningTime="2025-11-24 08:50:49.787016459 +0000 UTC m=+921.903307363" watchObservedRunningTime="2025-11-24 08:50:50.664471217 +0000 UTC m=+922.780762121" Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.666780 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/keystone-db-sync-nwsdw"] Nov 24 08:50:50 crc kubenswrapper[4718]: E1124 08:50:50.667470 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db5b4c90-8a71-4000-9666-93c6d8bd99db" containerName="mariadb-account-create-update" Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.667497 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="db5b4c90-8a71-4000-9666-93c6d8bd99db" containerName="mariadb-account-create-update" Nov 24 08:50:50 crc kubenswrapper[4718]: E1124 08:50:50.667521 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c8913b0-43ac-4afc-896f-8c82f07d2c56" containerName="mariadb-database-create" Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.667531 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c8913b0-43ac-4afc-896f-8c82f07d2c56" containerName="mariadb-database-create" Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.668708 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c8913b0-43ac-4afc-896f-8c82f07d2c56" containerName="mariadb-database-create" Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.668770 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="db5b4c90-8a71-4000-9666-93c6d8bd99db" containerName="mariadb-account-create-update" Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.669612 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-db-sync-nwsdw" Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.672662 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone" Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.678990 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-db-sync-nwsdw"] Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.681390 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-scripts" Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.681460 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-keystone-dockercfg-qrth6" Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.682721 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-config-data" Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.812905 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qx2jt\" (UniqueName: \"kubernetes.io/projected/a8211a8c-2e51-4031-b0ef-4831c0c97924-kube-api-access-qx2jt\") pod \"keystone-db-sync-nwsdw\" (UID: \"a8211a8c-2e51-4031-b0ef-4831c0c97924\") " pod="glance-kuttl-tests/keystone-db-sync-nwsdw" Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.813019 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8211a8c-2e51-4031-b0ef-4831c0c97924-config-data\") pod \"keystone-db-sync-nwsdw\" (UID: \"a8211a8c-2e51-4031-b0ef-4831c0c97924\") " pod="glance-kuttl-tests/keystone-db-sync-nwsdw" Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.914434 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qx2jt\" (UniqueName: \"kubernetes.io/projected/a8211a8c-2e51-4031-b0ef-4831c0c97924-kube-api-access-qx2jt\") pod \"keystone-db-sync-nwsdw\" (UID: \"a8211a8c-2e51-4031-b0ef-4831c0c97924\") " pod="glance-kuttl-tests/keystone-db-sync-nwsdw" Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.914513 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8211a8c-2e51-4031-b0ef-4831c0c97924-config-data\") pod \"keystone-db-sync-nwsdw\" (UID: \"a8211a8c-2e51-4031-b0ef-4831c0c97924\") " pod="glance-kuttl-tests/keystone-db-sync-nwsdw" Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.929928 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qx2jt\" (UniqueName: \"kubernetes.io/projected/a8211a8c-2e51-4031-b0ef-4831c0c97924-kube-api-access-qx2jt\") pod \"keystone-db-sync-nwsdw\" (UID: \"a8211a8c-2e51-4031-b0ef-4831c0c97924\") " pod="glance-kuttl-tests/keystone-db-sync-nwsdw" Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.930089 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8211a8c-2e51-4031-b0ef-4831c0c97924-config-data\") pod \"keystone-db-sync-nwsdw\" (UID: \"a8211a8c-2e51-4031-b0ef-4831c0c97924\") " pod="glance-kuttl-tests/keystone-db-sync-nwsdw" Nov 24 08:50:50 crc kubenswrapper[4718]: I1124 08:50:50.994361 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-db-sync-nwsdw" Nov 24 08:50:51 crc kubenswrapper[4718]: I1124 08:50:51.393453 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-db-sync-nwsdw"] Nov 24 08:50:51 crc kubenswrapper[4718]: I1124 08:50:51.784517 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-sync-nwsdw" event={"ID":"a8211a8c-2e51-4031-b0ef-4831c0c97924","Type":"ContainerStarted","Data":"42f9999782af37fb5cd42dbe57cf073a8e579fbe7c1eb241e04a275511622469"} Nov 24 08:50:52 crc kubenswrapper[4718]: I1124 08:50:52.044747 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:50:52 crc kubenswrapper[4718]: I1124 08:50:52.044803 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:50:52 crc kubenswrapper[4718]: I1124 08:50:52.483327 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-index-76zdn"] Nov 24 08:50:52 crc kubenswrapper[4718]: I1124 08:50:52.483566 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/swift-operator-index-76zdn" podUID="675c1f95-97f0-4a23-9539-e2524c8211f6" containerName="registry-server" containerID="cri-o://357f4cd4e1e6b7cc313eed872d5a6890d4ff018f3195cff67905a860eb4b2abc" gracePeriod=2 Nov 24 08:50:52 crc kubenswrapper[4718]: I1124 08:50:52.792431 4718 generic.go:334] "Generic (PLEG): container finished" podID="675c1f95-97f0-4a23-9539-e2524c8211f6" containerID="357f4cd4e1e6b7cc313eed872d5a6890d4ff018f3195cff67905a860eb4b2abc" exitCode=0 Nov 24 08:50:52 crc kubenswrapper[4718]: I1124 08:50:52.792546 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-76zdn" event={"ID":"675c1f95-97f0-4a23-9539-e2524c8211f6","Type":"ContainerDied","Data":"357f4cd4e1e6b7cc313eed872d5a6890d4ff018f3195cff67905a860eb4b2abc"} Nov 24 08:50:52 crc kubenswrapper[4718]: I1124 08:50:52.856695 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-index-76zdn" Nov 24 08:50:52 crc kubenswrapper[4718]: I1124 08:50:52.943050 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t2hb7\" (UniqueName: \"kubernetes.io/projected/675c1f95-97f0-4a23-9539-e2524c8211f6-kube-api-access-t2hb7\") pod \"675c1f95-97f0-4a23-9539-e2524c8211f6\" (UID: \"675c1f95-97f0-4a23-9539-e2524c8211f6\") " Nov 24 08:50:52 crc kubenswrapper[4718]: I1124 08:50:52.949333 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/675c1f95-97f0-4a23-9539-e2524c8211f6-kube-api-access-t2hb7" (OuterVolumeSpecName: "kube-api-access-t2hb7") pod "675c1f95-97f0-4a23-9539-e2524c8211f6" (UID: "675c1f95-97f0-4a23-9539-e2524c8211f6"). InnerVolumeSpecName "kube-api-access-t2hb7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.044475 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t2hb7\" (UniqueName: \"kubernetes.io/projected/675c1f95-97f0-4a23-9539-e2524c8211f6-kube-api-access-t2hb7\") on node \"crc\" DevicePath \"\"" Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.215768 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/horizon-operator-index-mqctp" Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.215805 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-index-mqctp" Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.247854 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/horizon-operator-index-mqctp" Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.295368 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-index-7vr99"] Nov 24 08:50:53 crc kubenswrapper[4718]: E1124 08:50:53.295898 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="675c1f95-97f0-4a23-9539-e2524c8211f6" containerName="registry-server" Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.295915 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="675c1f95-97f0-4a23-9539-e2524c8211f6" containerName="registry-server" Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.296135 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="675c1f95-97f0-4a23-9539-e2524c8211f6" containerName="registry-server" Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.296896 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-index-7vr99" Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.303331 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-7vr99"] Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.448887 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rw2hz\" (UniqueName: \"kubernetes.io/projected/7deab911-994f-4484-8961-fd426a52aa55-kube-api-access-rw2hz\") pod \"swift-operator-index-7vr99\" (UID: \"7deab911-994f-4484-8961-fd426a52aa55\") " pod="openstack-operators/swift-operator-index-7vr99" Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.551054 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rw2hz\" (UniqueName: \"kubernetes.io/projected/7deab911-994f-4484-8961-fd426a52aa55-kube-api-access-rw2hz\") pod \"swift-operator-index-7vr99\" (UID: \"7deab911-994f-4484-8961-fd426a52aa55\") " pod="openstack-operators/swift-operator-index-7vr99" Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.569617 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rw2hz\" (UniqueName: \"kubernetes.io/projected/7deab911-994f-4484-8961-fd426a52aa55-kube-api-access-rw2hz\") pod \"swift-operator-index-7vr99\" (UID: \"7deab911-994f-4484-8961-fd426a52aa55\") " pod="openstack-operators/swift-operator-index-7vr99" Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.626550 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-index-7vr99" Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.804783 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-76zdn" event={"ID":"675c1f95-97f0-4a23-9539-e2524c8211f6","Type":"ContainerDied","Data":"6cab2034430669a02224f1dd2a655939df78d8cc2d9bfa748a21c30c942a4a8f"} Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.804814 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-index-76zdn" Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.805131 4718 scope.go:117] "RemoveContainer" containerID="357f4cd4e1e6b7cc313eed872d5a6890d4ff018f3195cff67905a860eb4b2abc" Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.836533 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-index-76zdn"] Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.836597 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-index-mqctp" Nov 24 08:50:53 crc kubenswrapper[4718]: I1124 08:50:53.840851 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/swift-operator-index-76zdn"] Nov 24 08:50:54 crc kubenswrapper[4718]: I1124 08:50:54.014932 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-7vr99"] Nov 24 08:50:54 crc kubenswrapper[4718]: W1124 08:50:54.403497 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7deab911_994f_4484_8961_fd426a52aa55.slice/crio-9658cc174277ae46562cdf5722126c0a729752e4799d33fae747ff97593bba77 WatchSource:0}: Error finding container 9658cc174277ae46562cdf5722126c0a729752e4799d33fae747ff97593bba77: Status 404 returned error can't find the container with id 9658cc174277ae46562cdf5722126c0a729752e4799d33fae747ff97593bba77 Nov 24 08:50:54 crc kubenswrapper[4718]: I1124 08:50:54.608237 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="675c1f95-97f0-4a23-9539-e2524c8211f6" path="/var/lib/kubelet/pods/675c1f95-97f0-4a23-9539-e2524c8211f6/volumes" Nov 24 08:50:54 crc kubenswrapper[4718]: I1124 08:50:54.816317 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-7vr99" event={"ID":"7deab911-994f-4484-8961-fd426a52aa55","Type":"ContainerStarted","Data":"9658cc174277ae46562cdf5722126c0a729752e4799d33fae747ff97593bba77"} Nov 24 08:50:55 crc kubenswrapper[4718]: I1124 08:50:55.823642 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-7vr99" event={"ID":"7deab911-994f-4484-8961-fd426a52aa55","Type":"ContainerStarted","Data":"cd631092b7681226afa14a896093e930b3c2a227a9a38ffb0562caf86d5ff39d"} Nov 24 08:50:58 crc kubenswrapper[4718]: I1124 08:50:58.617249 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-index-7vr99" podStartSLOduration=5.212001001 podStartE2EDuration="5.617229943s" podCreationTimestamp="2025-11-24 08:50:53 +0000 UTC" firstStartedPulling="2025-11-24 08:50:54.409210205 +0000 UTC m=+926.525501109" lastFinishedPulling="2025-11-24 08:50:54.814439147 +0000 UTC m=+926.930730051" observedRunningTime="2025-11-24 08:50:55.841208576 +0000 UTC m=+927.957499480" watchObservedRunningTime="2025-11-24 08:50:58.617229943 +0000 UTC m=+930.733520847" Nov 24 
08:51:00 crc kubenswrapper[4718]: I1124 08:51:00.855614 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-sync-nwsdw" event={"ID":"a8211a8c-2e51-4031-b0ef-4831c0c97924","Type":"ContainerStarted","Data":"adf7aa49d3532a360b6fc92ca69aeff2f29ef66d4dfac87f647ddb1875778524"} Nov 24 08:51:00 crc kubenswrapper[4718]: I1124 08:51:00.872445 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/keystone-db-sync-nwsdw" podStartSLOduration=2.081263383 podStartE2EDuration="10.872427334s" podCreationTimestamp="2025-11-24 08:50:50 +0000 UTC" firstStartedPulling="2025-11-24 08:50:51.404899106 +0000 UTC m=+923.521190010" lastFinishedPulling="2025-11-24 08:51:00.196063057 +0000 UTC m=+932.312353961" observedRunningTime="2025-11-24 08:51:00.868939231 +0000 UTC m=+932.985230145" watchObservedRunningTime="2025-11-24 08:51:00.872427334 +0000 UTC m=+932.988718238" Nov 24 08:51:03 crc kubenswrapper[4718]: I1124 08:51:03.627714 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/swift-operator-index-7vr99" Nov 24 08:51:03 crc kubenswrapper[4718]: I1124 08:51:03.628077 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-index-7vr99" Nov 24 08:51:03 crc kubenswrapper[4718]: I1124 08:51:03.654732 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/swift-operator-index-7vr99" Nov 24 08:51:03 crc kubenswrapper[4718]: I1124 08:51:03.873636 4718 generic.go:334] "Generic (PLEG): container finished" podID="a8211a8c-2e51-4031-b0ef-4831c0c97924" containerID="adf7aa49d3532a360b6fc92ca69aeff2f29ef66d4dfac87f647ddb1875778524" exitCode=0 Nov 24 08:51:03 crc kubenswrapper[4718]: I1124 08:51:03.873906 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-sync-nwsdw" event={"ID":"a8211a8c-2e51-4031-b0ef-4831c0c97924","Type":"ContainerDied","Data":"adf7aa49d3532a360b6fc92ca69aeff2f29ef66d4dfac87f647ddb1875778524"} Nov 24 08:51:03 crc kubenswrapper[4718]: I1124 08:51:03.905435 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-index-7vr99" Nov 24 08:51:05 crc kubenswrapper[4718]: I1124 08:51:05.174669 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-db-sync-nwsdw" Nov 24 08:51:05 crc kubenswrapper[4718]: I1124 08:51:05.324219 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qx2jt\" (UniqueName: \"kubernetes.io/projected/a8211a8c-2e51-4031-b0ef-4831c0c97924-kube-api-access-qx2jt\") pod \"a8211a8c-2e51-4031-b0ef-4831c0c97924\" (UID: \"a8211a8c-2e51-4031-b0ef-4831c0c97924\") " Nov 24 08:51:05 crc kubenswrapper[4718]: I1124 08:51:05.324301 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8211a8c-2e51-4031-b0ef-4831c0c97924-config-data\") pod \"a8211a8c-2e51-4031-b0ef-4831c0c97924\" (UID: \"a8211a8c-2e51-4031-b0ef-4831c0c97924\") " Nov 24 08:51:05 crc kubenswrapper[4718]: I1124 08:51:05.345326 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8211a8c-2e51-4031-b0ef-4831c0c97924-kube-api-access-qx2jt" (OuterVolumeSpecName: "kube-api-access-qx2jt") pod "a8211a8c-2e51-4031-b0ef-4831c0c97924" (UID: "a8211a8c-2e51-4031-b0ef-4831c0c97924"). 
InnerVolumeSpecName "kube-api-access-qx2jt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:51:05 crc kubenswrapper[4718]: I1124 08:51:05.364540 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8211a8c-2e51-4031-b0ef-4831c0c97924-config-data" (OuterVolumeSpecName: "config-data") pod "a8211a8c-2e51-4031-b0ef-4831c0c97924" (UID: "a8211a8c-2e51-4031-b0ef-4831c0c97924"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:51:05 crc kubenswrapper[4718]: I1124 08:51:05.425560 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qx2jt\" (UniqueName: \"kubernetes.io/projected/a8211a8c-2e51-4031-b0ef-4831c0c97924-kube-api-access-qx2jt\") on node \"crc\" DevicePath \"\"" Nov 24 08:51:05 crc kubenswrapper[4718]: I1124 08:51:05.425610 4718 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8211a8c-2e51-4031-b0ef-4831c0c97924-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 08:51:05 crc kubenswrapper[4718]: I1124 08:51:05.887759 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-sync-nwsdw" event={"ID":"a8211a8c-2e51-4031-b0ef-4831c0c97924","Type":"ContainerDied","Data":"42f9999782af37fb5cd42dbe57cf073a8e579fbe7c1eb241e04a275511622469"} Nov 24 08:51:05 crc kubenswrapper[4718]: I1124 08:51:05.888124 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42f9999782af37fb5cd42dbe57cf073a8e579fbe7c1eb241e04a275511622469" Nov 24 08:51:05 crc kubenswrapper[4718]: I1124 08:51:05.887826 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-db-sync-nwsdw" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.067681 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/keystone-bootstrap-wrd7l"] Nov 24 08:51:06 crc kubenswrapper[4718]: E1124 08:51:06.067922 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8211a8c-2e51-4031-b0ef-4831c0c97924" containerName="keystone-db-sync" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.067934 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8211a8c-2e51-4031-b0ef-4831c0c97924" containerName="keystone-db-sync" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.068075 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8211a8c-2e51-4031-b0ef-4831c0c97924" containerName="keystone-db-sync" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.068482 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.070322 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-keystone-dockercfg-qrth6" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.070850 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-scripts" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.070899 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"osp-secret" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.070930 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.072236 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-config-data" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.082361 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-bootstrap-wrd7l"] Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.133643 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-scripts\") pod \"keystone-bootstrap-wrd7l\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.133888 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-config-data\") pod \"keystone-bootstrap-wrd7l\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.134026 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-credential-keys\") pod \"keystone-bootstrap-wrd7l\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.134109 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5gzp\" (UniqueName: \"kubernetes.io/projected/8fb11712-1346-4bba-9ee1-eaa809ec3b42-kube-api-access-z5gzp\") pod \"keystone-bootstrap-wrd7l\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.134247 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-fernet-keys\") pod \"keystone-bootstrap-wrd7l\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.235646 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-scripts\") pod \"keystone-bootstrap-wrd7l\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 
08:51:06.236653 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-config-data\") pod \"keystone-bootstrap-wrd7l\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.236749 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-credential-keys\") pod \"keystone-bootstrap-wrd7l\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.236864 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5gzp\" (UniqueName: \"kubernetes.io/projected/8fb11712-1346-4bba-9ee1-eaa809ec3b42-kube-api-access-z5gzp\") pod \"keystone-bootstrap-wrd7l\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.237051 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-fernet-keys\") pod \"keystone-bootstrap-wrd7l\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.239438 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-scripts\") pod \"keystone-bootstrap-wrd7l\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.245651 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-fernet-keys\") pod \"keystone-bootstrap-wrd7l\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.248708 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-config-data\") pod \"keystone-bootstrap-wrd7l\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.249080 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-credential-keys\") pod \"keystone-bootstrap-wrd7l\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.278582 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5gzp\" (UniqueName: \"kubernetes.io/projected/8fb11712-1346-4bba-9ee1-eaa809ec3b42-kube-api-access-z5gzp\") pod \"keystone-bootstrap-wrd7l\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.383053 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.766742 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-bootstrap-wrd7l"] Nov 24 08:51:06 crc kubenswrapper[4718]: W1124 08:51:06.775696 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8fb11712_1346_4bba_9ee1_eaa809ec3b42.slice/crio-ec442752644d81147115ad93b5f252434b53b0aa2078d43f97386eb1ae7730ea WatchSource:0}: Error finding container ec442752644d81147115ad93b5f252434b53b0aa2078d43f97386eb1ae7730ea: Status 404 returned error can't find the container with id ec442752644d81147115ad93b5f252434b53b0aa2078d43f97386eb1ae7730ea Nov 24 08:51:06 crc kubenswrapper[4718]: I1124 08:51:06.896493 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" event={"ID":"8fb11712-1346-4bba-9ee1-eaa809ec3b42","Type":"ContainerStarted","Data":"ec442752644d81147115ad93b5f252434b53b0aa2078d43f97386eb1ae7730ea"} Nov 24 08:51:07 crc kubenswrapper[4718]: I1124 08:51:07.911652 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" event={"ID":"8fb11712-1346-4bba-9ee1-eaa809ec3b42","Type":"ContainerStarted","Data":"467e58e87863e217f10ddbcfe1df8b68c537a24a4e2bd68c2535f12942ff733a"} Nov 24 08:51:07 crc kubenswrapper[4718]: I1124 08:51:07.938504 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" podStartSLOduration=1.9384721200000001 podStartE2EDuration="1.93847212s" podCreationTimestamp="2025-11-24 08:51:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:51:07.937157858 +0000 UTC m=+940.053448772" watchObservedRunningTime="2025-11-24 08:51:07.93847212 +0000 UTC m=+940.054763034" Nov 24 08:51:09 crc kubenswrapper[4718]: I1124 08:51:09.927205 4718 generic.go:334] "Generic (PLEG): container finished" podID="8fb11712-1346-4bba-9ee1-eaa809ec3b42" containerID="467e58e87863e217f10ddbcfe1df8b68c537a24a4e2bd68c2535f12942ff733a" exitCode=0 Nov 24 08:51:09 crc kubenswrapper[4718]: I1124 08:51:09.927290 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" event={"ID":"8fb11712-1346-4bba-9ee1-eaa809ec3b42","Type":"ContainerDied","Data":"467e58e87863e217f10ddbcfe1df8b68c537a24a4e2bd68c2535f12942ff733a"} Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.237636 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.304709 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-config-data\") pod \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.304796 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-credential-keys\") pod \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.305062 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-fernet-keys\") pod \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.305116 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5gzp\" (UniqueName: \"kubernetes.io/projected/8fb11712-1346-4bba-9ee1-eaa809ec3b42-kube-api-access-z5gzp\") pod \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.305152 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-scripts\") pod \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\" (UID: \"8fb11712-1346-4bba-9ee1-eaa809ec3b42\") " Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.310190 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fb11712-1346-4bba-9ee1-eaa809ec3b42-kube-api-access-z5gzp" (OuterVolumeSpecName: "kube-api-access-z5gzp") pod "8fb11712-1346-4bba-9ee1-eaa809ec3b42" (UID: "8fb11712-1346-4bba-9ee1-eaa809ec3b42"). InnerVolumeSpecName "kube-api-access-z5gzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.319658 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "8fb11712-1346-4bba-9ee1-eaa809ec3b42" (UID: "8fb11712-1346-4bba-9ee1-eaa809ec3b42"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.319817 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-scripts" (OuterVolumeSpecName: "scripts") pod "8fb11712-1346-4bba-9ee1-eaa809ec3b42" (UID: "8fb11712-1346-4bba-9ee1-eaa809ec3b42"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.320987 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8fb11712-1346-4bba-9ee1-eaa809ec3b42" (UID: "8fb11712-1346-4bba-9ee1-eaa809ec3b42"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.322271 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-config-data" (OuterVolumeSpecName: "config-data") pod "8fb11712-1346-4bba-9ee1-eaa809ec3b42" (UID: "8fb11712-1346-4bba-9ee1-eaa809ec3b42"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.407125 4718 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.407184 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5gzp\" (UniqueName: \"kubernetes.io/projected/8fb11712-1346-4bba-9ee1-eaa809ec3b42-kube-api-access-z5gzp\") on node \"crc\" DevicePath \"\"" Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.407196 4718 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.407205 4718 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.407213 4718 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8fb11712-1346-4bba-9ee1-eaa809ec3b42-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.941435 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" event={"ID":"8fb11712-1346-4bba-9ee1-eaa809ec3b42","Type":"ContainerDied","Data":"ec442752644d81147115ad93b5f252434b53b0aa2078d43f97386eb1ae7730ea"} Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.941478 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec442752644d81147115ad93b5f252434b53b0aa2078d43f97386eb1ae7730ea" Nov 24 08:51:11 crc kubenswrapper[4718]: I1124 08:51:11.941486 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-bootstrap-wrd7l" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.016503 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/keystone-668c488b5b-8jjjq"] Nov 24 08:51:12 crc kubenswrapper[4718]: E1124 08:51:12.019683 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fb11712-1346-4bba-9ee1-eaa809ec3b42" containerName="keystone-bootstrap" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.019714 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fb11712-1346-4bba-9ee1-eaa809ec3b42" containerName="keystone-bootstrap" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.020092 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fb11712-1346-4bba-9ee1-eaa809ec3b42" containerName="keystone-bootstrap" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.021026 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.023211 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-scripts" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.023761 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-keystone-dockercfg-qrth6" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.023485 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-config-data" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.023571 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.031824 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-668c488b5b-8jjjq"] Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.119533 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/369cd9aa-3e04-4677-b0a4-f9e3422f7944-scripts\") pod \"keystone-668c488b5b-8jjjq\" (UID: \"369cd9aa-3e04-4677-b0a4-f9e3422f7944\") " pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.119589 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/369cd9aa-3e04-4677-b0a4-f9e3422f7944-credential-keys\") pod \"keystone-668c488b5b-8jjjq\" (UID: \"369cd9aa-3e04-4677-b0a4-f9e3422f7944\") " pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.119719 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp9v8\" (UniqueName: \"kubernetes.io/projected/369cd9aa-3e04-4677-b0a4-f9e3422f7944-kube-api-access-rp9v8\") pod \"keystone-668c488b5b-8jjjq\" (UID: \"369cd9aa-3e04-4677-b0a4-f9e3422f7944\") " pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.119770 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/369cd9aa-3e04-4677-b0a4-f9e3422f7944-config-data\") pod \"keystone-668c488b5b-8jjjq\" (UID: \"369cd9aa-3e04-4677-b0a4-f9e3422f7944\") " pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.119796 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/369cd9aa-3e04-4677-b0a4-f9e3422f7944-fernet-keys\") pod \"keystone-668c488b5b-8jjjq\" (UID: \"369cd9aa-3e04-4677-b0a4-f9e3422f7944\") " pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.221579 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/369cd9aa-3e04-4677-b0a4-f9e3422f7944-config-data\") pod \"keystone-668c488b5b-8jjjq\" (UID: \"369cd9aa-3e04-4677-b0a4-f9e3422f7944\") " pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.221628 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/369cd9aa-3e04-4677-b0a4-f9e3422f7944-fernet-keys\") pod \"keystone-668c488b5b-8jjjq\" (UID: \"369cd9aa-3e04-4677-b0a4-f9e3422f7944\") " pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.221685 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/369cd9aa-3e04-4677-b0a4-f9e3422f7944-scripts\") pod \"keystone-668c488b5b-8jjjq\" (UID: \"369cd9aa-3e04-4677-b0a4-f9e3422f7944\") " pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.221752 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/369cd9aa-3e04-4677-b0a4-f9e3422f7944-credential-keys\") pod \"keystone-668c488b5b-8jjjq\" (UID: \"369cd9aa-3e04-4677-b0a4-f9e3422f7944\") " pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.221807 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp9v8\" (UniqueName: \"kubernetes.io/projected/369cd9aa-3e04-4677-b0a4-f9e3422f7944-kube-api-access-rp9v8\") pod \"keystone-668c488b5b-8jjjq\" (UID: \"369cd9aa-3e04-4677-b0a4-f9e3422f7944\") " pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.225755 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/369cd9aa-3e04-4677-b0a4-f9e3422f7944-fernet-keys\") pod \"keystone-668c488b5b-8jjjq\" (UID: \"369cd9aa-3e04-4677-b0a4-f9e3422f7944\") " pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.226146 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/369cd9aa-3e04-4677-b0a4-f9e3422f7944-scripts\") pod \"keystone-668c488b5b-8jjjq\" (UID: \"369cd9aa-3e04-4677-b0a4-f9e3422f7944\") " pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.226321 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/369cd9aa-3e04-4677-b0a4-f9e3422f7944-config-data\") pod \"keystone-668c488b5b-8jjjq\" (UID: \"369cd9aa-3e04-4677-b0a4-f9e3422f7944\") " pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.229614 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/369cd9aa-3e04-4677-b0a4-f9e3422f7944-credential-keys\") pod \"keystone-668c488b5b-8jjjq\" (UID: \"369cd9aa-3e04-4677-b0a4-f9e3422f7944\") " pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.244321 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp9v8\" (UniqueName: \"kubernetes.io/projected/369cd9aa-3e04-4677-b0a4-f9e3422f7944-kube-api-access-rp9v8\") pod \"keystone-668c488b5b-8jjjq\" (UID: \"369cd9aa-3e04-4677-b0a4-f9e3422f7944\") " pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.345915 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.775817 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-668c488b5b-8jjjq"] Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.949060 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" event={"ID":"369cd9aa-3e04-4677-b0a4-f9e3422f7944","Type":"ContainerStarted","Data":"5c392ab3de31b4f3ca475ce85d61cda532eae1dcdbf2b56b5410c9d05f2c0510"} Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.949425 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" event={"ID":"369cd9aa-3e04-4677-b0a4-f9e3422f7944","Type":"ContainerStarted","Data":"2bdbbc2125ec105847edefa75b38102adea9b29fe7d7cba571a543b9eaac2e19"} Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.949445 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:12 crc kubenswrapper[4718]: I1124 08:51:12.970385 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" podStartSLOduration=1.970363049 podStartE2EDuration="1.970363049s" podCreationTimestamp="2025-11-24 08:51:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:51:12.964663969 +0000 UTC m=+945.080954863" watchObservedRunningTime="2025-11-24 08:51:12.970363049 +0000 UTC m=+945.086653953" Nov 24 08:51:19 crc kubenswrapper[4718]: I1124 08:51:19.608945 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb"] Nov 24 08:51:19 crc kubenswrapper[4718]: I1124 08:51:19.610778 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" Nov 24 08:51:19 crc kubenswrapper[4718]: I1124 08:51:19.616061 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-fq6vw" Nov 24 08:51:19 crc kubenswrapper[4718]: I1124 08:51:19.620660 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb"] Nov 24 08:51:19 crc kubenswrapper[4718]: I1124 08:51:19.742643 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-bundle\") pod \"62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb\" (UID: \"5241e0fd-f18b-4c8d-aa12-e5d61a29082e\") " pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" Nov 24 08:51:19 crc kubenswrapper[4718]: I1124 08:51:19.743016 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xw2rw\" (UniqueName: \"kubernetes.io/projected/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-kube-api-access-xw2rw\") pod \"62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb\" (UID: \"5241e0fd-f18b-4c8d-aa12-e5d61a29082e\") " pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" Nov 24 08:51:19 crc kubenswrapper[4718]: I1124 08:51:19.743083 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-util\") pod \"62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb\" (UID: \"5241e0fd-f18b-4c8d-aa12-e5d61a29082e\") " pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" Nov 24 08:51:19 crc kubenswrapper[4718]: I1124 08:51:19.844837 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xw2rw\" (UniqueName: \"kubernetes.io/projected/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-kube-api-access-xw2rw\") pod \"62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb\" (UID: \"5241e0fd-f18b-4c8d-aa12-e5d61a29082e\") " pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" Nov 24 08:51:19 crc kubenswrapper[4718]: I1124 08:51:19.844937 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-util\") pod \"62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb\" (UID: \"5241e0fd-f18b-4c8d-aa12-e5d61a29082e\") " pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" Nov 24 08:51:19 crc kubenswrapper[4718]: I1124 08:51:19.845054 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-bundle\") pod \"62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb\" (UID: \"5241e0fd-f18b-4c8d-aa12-e5d61a29082e\") " pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" Nov 24 08:51:19 crc kubenswrapper[4718]: I1124 08:51:19.845703 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-bundle\") pod \"62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb\" (UID: \"5241e0fd-f18b-4c8d-aa12-e5d61a29082e\") " pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" Nov 24 08:51:19 crc kubenswrapper[4718]: I1124 08:51:19.845819 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-util\") pod \"62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb\" (UID: \"5241e0fd-f18b-4c8d-aa12-e5d61a29082e\") " pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" Nov 24 08:51:19 crc kubenswrapper[4718]: I1124 08:51:19.867292 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xw2rw\" (UniqueName: \"kubernetes.io/projected/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-kube-api-access-xw2rw\") pod \"62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb\" (UID: \"5241e0fd-f18b-4c8d-aa12-e5d61a29082e\") " pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" Nov 24 08:51:19 crc kubenswrapper[4718]: I1124 08:51:19.940494 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" Nov 24 08:51:20 crc kubenswrapper[4718]: I1124 08:51:20.256257 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb"] Nov 24 08:51:20 crc kubenswrapper[4718]: I1124 08:51:20.654555 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc"] Nov 24 08:51:20 crc kubenswrapper[4718]: I1124 08:51:20.657245 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc"] Nov 24 08:51:20 crc kubenswrapper[4718]: I1124 08:51:20.657422 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" Nov 24 08:51:20 crc kubenswrapper[4718]: I1124 08:51:20.764299 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-bundle\") pod \"440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc\" (UID: \"8bde562b-ba15-48c3-a959-bb7a2efb1ad1\") " pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" Nov 24 08:51:20 crc kubenswrapper[4718]: I1124 08:51:20.764376 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cszl2\" (UniqueName: \"kubernetes.io/projected/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-kube-api-access-cszl2\") pod \"440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc\" (UID: \"8bde562b-ba15-48c3-a959-bb7a2efb1ad1\") " pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" Nov 24 08:51:20 crc kubenswrapper[4718]: I1124 08:51:20.764450 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-util\") pod \"440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc\" (UID: \"8bde562b-ba15-48c3-a959-bb7a2efb1ad1\") " pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" Nov 24 08:51:20 crc kubenswrapper[4718]: I1124 08:51:20.865253 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-bundle\") pod \"440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc\" (UID: \"8bde562b-ba15-48c3-a959-bb7a2efb1ad1\") " pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" Nov 24 08:51:20 crc kubenswrapper[4718]: I1124 08:51:20.865344 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cszl2\" (UniqueName: \"kubernetes.io/projected/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-kube-api-access-cszl2\") pod \"440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc\" (UID: \"8bde562b-ba15-48c3-a959-bb7a2efb1ad1\") " pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" Nov 24 08:51:20 crc kubenswrapper[4718]: I1124 08:51:20.865398 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-util\") pod \"440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc\" (UID: \"8bde562b-ba15-48c3-a959-bb7a2efb1ad1\") " pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" Nov 24 08:51:20 crc kubenswrapper[4718]: I1124 08:51:20.865887 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-bundle\") pod \"440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc\" (UID: \"8bde562b-ba15-48c3-a959-bb7a2efb1ad1\") " pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" Nov 24 08:51:20 crc kubenswrapper[4718]: I1124 08:51:20.865928 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-util\") pod \"440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc\" (UID: \"8bde562b-ba15-48c3-a959-bb7a2efb1ad1\") " pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" Nov 24 08:51:20 crc kubenswrapper[4718]: I1124 08:51:20.887528 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cszl2\" (UniqueName: \"kubernetes.io/projected/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-kube-api-access-cszl2\") pod \"440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc\" (UID: \"8bde562b-ba15-48c3-a959-bb7a2efb1ad1\") " pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" Nov 24 08:51:20 crc kubenswrapper[4718]: I1124 08:51:20.991527 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" Nov 24 08:51:21 crc kubenswrapper[4718]: I1124 08:51:21.005996 4718 generic.go:334] "Generic (PLEG): container finished" podID="5241e0fd-f18b-4c8d-aa12-e5d61a29082e" containerID="8fd9726be40d21ec0b37afa5772f8051a7d24e2d914185a575ad63afa0e00378" exitCode=0 Nov 24 08:51:21 crc kubenswrapper[4718]: I1124 08:51:21.006069 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" event={"ID":"5241e0fd-f18b-4c8d-aa12-e5d61a29082e","Type":"ContainerDied","Data":"8fd9726be40d21ec0b37afa5772f8051a7d24e2d914185a575ad63afa0e00378"} Nov 24 08:51:21 crc kubenswrapper[4718]: I1124 08:51:21.006121 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" event={"ID":"5241e0fd-f18b-4c8d-aa12-e5d61a29082e","Type":"ContainerStarted","Data":"8acc15beceba644ff77426825795d844fc3d28e1de48987decd30c04820bd402"} Nov 24 08:51:21 crc kubenswrapper[4718]: I1124 08:51:21.416889 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc"] Nov 24 08:51:22 crc kubenswrapper[4718]: I1124 08:51:22.016419 4718 generic.go:334] "Generic (PLEG): container finished" podID="8bde562b-ba15-48c3-a959-bb7a2efb1ad1" containerID="b8d8269667c32edfa980de5f7b9fb0f94fd4351c595c2d6eb6216faebc5b1724" exitCode=0 Nov 24 08:51:22 crc kubenswrapper[4718]: I1124 08:51:22.016512 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" event={"ID":"8bde562b-ba15-48c3-a959-bb7a2efb1ad1","Type":"ContainerDied","Data":"b8d8269667c32edfa980de5f7b9fb0f94fd4351c595c2d6eb6216faebc5b1724"} Nov 24 08:51:22 crc kubenswrapper[4718]: I1124 08:51:22.016731 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" event={"ID":"8bde562b-ba15-48c3-a959-bb7a2efb1ad1","Type":"ContainerStarted","Data":"77544a1466546cd3a81435fdcb1e379ab6154e1f002d97ebbcc14f069cd703bc"} Nov 24 08:51:22 crc kubenswrapper[4718]: I1124 08:51:22.045169 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:51:22 crc kubenswrapper[4718]: I1124 
08:51:22.045219 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:51:23 crc kubenswrapper[4718]: I1124 08:51:23.027354 4718 generic.go:334] "Generic (PLEG): container finished" podID="5241e0fd-f18b-4c8d-aa12-e5d61a29082e" containerID="7887a68337c72b30c2d8c730e16538ab030161ca5aab0da08a1452f640b98f7e" exitCode=0 Nov 24 08:51:23 crc kubenswrapper[4718]: I1124 08:51:23.027412 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" event={"ID":"5241e0fd-f18b-4c8d-aa12-e5d61a29082e","Type":"ContainerDied","Data":"7887a68337c72b30c2d8c730e16538ab030161ca5aab0da08a1452f640b98f7e"} Nov 24 08:51:24 crc kubenswrapper[4718]: I1124 08:51:24.035907 4718 generic.go:334] "Generic (PLEG): container finished" podID="5241e0fd-f18b-4c8d-aa12-e5d61a29082e" containerID="ca7528e3c5e216a10bfb96116f2811a7babbc4cd598248ba0ee9c03b897d505d" exitCode=0 Nov 24 08:51:24 crc kubenswrapper[4718]: I1124 08:51:24.035966 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" event={"ID":"5241e0fd-f18b-4c8d-aa12-e5d61a29082e","Type":"ContainerDied","Data":"ca7528e3c5e216a10bfb96116f2811a7babbc4cd598248ba0ee9c03b897d505d"} Nov 24 08:51:24 crc kubenswrapper[4718]: I1124 08:51:24.038288 4718 generic.go:334] "Generic (PLEG): container finished" podID="8bde562b-ba15-48c3-a959-bb7a2efb1ad1" containerID="dd1694335ff9d21d9adc6b2f8eac9fa5a91fa73d975f01052fee41501b68afa5" exitCode=0 Nov 24 08:51:24 crc kubenswrapper[4718]: I1124 08:51:24.038315 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" event={"ID":"8bde562b-ba15-48c3-a959-bb7a2efb1ad1","Type":"ContainerDied","Data":"dd1694335ff9d21d9adc6b2f8eac9fa5a91fa73d975f01052fee41501b68afa5"} Nov 24 08:51:25 crc kubenswrapper[4718]: I1124 08:51:25.058080 4718 generic.go:334] "Generic (PLEG): container finished" podID="8bde562b-ba15-48c3-a959-bb7a2efb1ad1" containerID="dfea7f231dd4f7d20f4f8c5baf7757f70df341beb6c20afec7042bfdedac862d" exitCode=0 Nov 24 08:51:25 crc kubenswrapper[4718]: I1124 08:51:25.058148 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" event={"ID":"8bde562b-ba15-48c3-a959-bb7a2efb1ad1","Type":"ContainerDied","Data":"dfea7f231dd4f7d20f4f8c5baf7757f70df341beb6c20afec7042bfdedac862d"} Nov 24 08:51:25 crc kubenswrapper[4718]: I1124 08:51:25.339395 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" Nov 24 08:51:25 crc kubenswrapper[4718]: I1124 08:51:25.436854 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xw2rw\" (UniqueName: \"kubernetes.io/projected/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-kube-api-access-xw2rw\") pod \"5241e0fd-f18b-4c8d-aa12-e5d61a29082e\" (UID: \"5241e0fd-f18b-4c8d-aa12-e5d61a29082e\") " Nov 24 08:51:25 crc kubenswrapper[4718]: I1124 08:51:25.437089 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-bundle\") pod \"5241e0fd-f18b-4c8d-aa12-e5d61a29082e\" (UID: \"5241e0fd-f18b-4c8d-aa12-e5d61a29082e\") " Nov 24 08:51:25 crc kubenswrapper[4718]: I1124 08:51:25.437154 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-util\") pod \"5241e0fd-f18b-4c8d-aa12-e5d61a29082e\" (UID: \"5241e0fd-f18b-4c8d-aa12-e5d61a29082e\") " Nov 24 08:51:25 crc kubenswrapper[4718]: I1124 08:51:25.438202 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-bundle" (OuterVolumeSpecName: "bundle") pod "5241e0fd-f18b-4c8d-aa12-e5d61a29082e" (UID: "5241e0fd-f18b-4c8d-aa12-e5d61a29082e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:51:25 crc kubenswrapper[4718]: I1124 08:51:25.443355 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-kube-api-access-xw2rw" (OuterVolumeSpecName: "kube-api-access-xw2rw") pod "5241e0fd-f18b-4c8d-aa12-e5d61a29082e" (UID: "5241e0fd-f18b-4c8d-aa12-e5d61a29082e"). InnerVolumeSpecName "kube-api-access-xw2rw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:51:25 crc kubenswrapper[4718]: I1124 08:51:25.452721 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-util" (OuterVolumeSpecName: "util") pod "5241e0fd-f18b-4c8d-aa12-e5d61a29082e" (UID: "5241e0fd-f18b-4c8d-aa12-e5d61a29082e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:51:25 crc kubenswrapper[4718]: I1124 08:51:25.538740 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xw2rw\" (UniqueName: \"kubernetes.io/projected/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-kube-api-access-xw2rw\") on node \"crc\" DevicePath \"\"" Nov 24 08:51:25 crc kubenswrapper[4718]: I1124 08:51:25.538790 4718 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:51:25 crc kubenswrapper[4718]: I1124 08:51:25.538800 4718 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5241e0fd-f18b-4c8d-aa12-e5d61a29082e-util\") on node \"crc\" DevicePath \"\"" Nov 24 08:51:26 crc kubenswrapper[4718]: I1124 08:51:26.066078 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" event={"ID":"5241e0fd-f18b-4c8d-aa12-e5d61a29082e","Type":"ContainerDied","Data":"8acc15beceba644ff77426825795d844fc3d28e1de48987decd30c04820bd402"} Nov 24 08:51:26 crc kubenswrapper[4718]: I1124 08:51:26.066105 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb" Nov 24 08:51:26 crc kubenswrapper[4718]: I1124 08:51:26.066122 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8acc15beceba644ff77426825795d844fc3d28e1de48987decd30c04820bd402" Nov 24 08:51:26 crc kubenswrapper[4718]: I1124 08:51:26.368099 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" Nov 24 08:51:26 crc kubenswrapper[4718]: I1124 08:51:26.449852 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-util\") pod \"8bde562b-ba15-48c3-a959-bb7a2efb1ad1\" (UID: \"8bde562b-ba15-48c3-a959-bb7a2efb1ad1\") " Nov 24 08:51:26 crc kubenswrapper[4718]: I1124 08:51:26.450004 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cszl2\" (UniqueName: \"kubernetes.io/projected/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-kube-api-access-cszl2\") pod \"8bde562b-ba15-48c3-a959-bb7a2efb1ad1\" (UID: \"8bde562b-ba15-48c3-a959-bb7a2efb1ad1\") " Nov 24 08:51:26 crc kubenswrapper[4718]: I1124 08:51:26.450119 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-bundle\") pod \"8bde562b-ba15-48c3-a959-bb7a2efb1ad1\" (UID: \"8bde562b-ba15-48c3-a959-bb7a2efb1ad1\") " Nov 24 08:51:26 crc kubenswrapper[4718]: I1124 08:51:26.450912 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-bundle" (OuterVolumeSpecName: "bundle") pod "8bde562b-ba15-48c3-a959-bb7a2efb1ad1" (UID: "8bde562b-ba15-48c3-a959-bb7a2efb1ad1"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:51:26 crc kubenswrapper[4718]: I1124 08:51:26.454118 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-kube-api-access-cszl2" (OuterVolumeSpecName: "kube-api-access-cszl2") pod "8bde562b-ba15-48c3-a959-bb7a2efb1ad1" (UID: "8bde562b-ba15-48c3-a959-bb7a2efb1ad1"). InnerVolumeSpecName "kube-api-access-cszl2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:51:26 crc kubenswrapper[4718]: I1124 08:51:26.466772 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-util" (OuterVolumeSpecName: "util") pod "8bde562b-ba15-48c3-a959-bb7a2efb1ad1" (UID: "8bde562b-ba15-48c3-a959-bb7a2efb1ad1"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:51:26 crc kubenswrapper[4718]: I1124 08:51:26.552233 4718 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-util\") on node \"crc\" DevicePath \"\"" Nov 24 08:51:26 crc kubenswrapper[4718]: I1124 08:51:26.552267 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cszl2\" (UniqueName: \"kubernetes.io/projected/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-kube-api-access-cszl2\") on node \"crc\" DevicePath \"\"" Nov 24 08:51:26 crc kubenswrapper[4718]: I1124 08:51:26.552281 4718 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8bde562b-ba15-48c3-a959-bb7a2efb1ad1-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:51:27 crc kubenswrapper[4718]: I1124 08:51:27.074785 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" event={"ID":"8bde562b-ba15-48c3-a959-bb7a2efb1ad1","Type":"ContainerDied","Data":"77544a1466546cd3a81435fdcb1e379ab6154e1f002d97ebbcc14f069cd703bc"} Nov 24 08:51:27 crc kubenswrapper[4718]: I1124 08:51:27.074834 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77544a1466546cd3a81435fdcb1e379ab6154e1f002d97ebbcc14f069cd703bc" Nov 24 08:51:27 crc kubenswrapper[4718]: I1124 08:51:27.074908 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc" Nov 24 08:51:43 crc kubenswrapper[4718]: I1124 08:51:43.971383 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/keystone-668c488b5b-8jjjq" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.310841 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55"] Nov 24 08:51:45 crc kubenswrapper[4718]: E1124 08:51:45.311139 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bde562b-ba15-48c3-a959-bb7a2efb1ad1" containerName="extract" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.311153 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bde562b-ba15-48c3-a959-bb7a2efb1ad1" containerName="extract" Nov 24 08:51:45 crc kubenswrapper[4718]: E1124 08:51:45.311165 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5241e0fd-f18b-4c8d-aa12-e5d61a29082e" containerName="pull" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.311171 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="5241e0fd-f18b-4c8d-aa12-e5d61a29082e" containerName="pull" Nov 24 08:51:45 crc kubenswrapper[4718]: E1124 08:51:45.311181 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bde562b-ba15-48c3-a959-bb7a2efb1ad1" containerName="util" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.311187 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bde562b-ba15-48c3-a959-bb7a2efb1ad1" containerName="util" Nov 24 08:51:45 crc kubenswrapper[4718]: E1124 08:51:45.311201 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5241e0fd-f18b-4c8d-aa12-e5d61a29082e" containerName="util" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.311206 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="5241e0fd-f18b-4c8d-aa12-e5d61a29082e" containerName="util" Nov 24 08:51:45 crc kubenswrapper[4718]: E1124 08:51:45.311216 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bde562b-ba15-48c3-a959-bb7a2efb1ad1" containerName="pull" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.311222 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bde562b-ba15-48c3-a959-bb7a2efb1ad1" containerName="pull" Nov 24 08:51:45 crc kubenswrapper[4718]: E1124 08:51:45.311234 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5241e0fd-f18b-4c8d-aa12-e5d61a29082e" containerName="extract" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.311241 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="5241e0fd-f18b-4c8d-aa12-e5d61a29082e" containerName="extract" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.311370 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="5241e0fd-f18b-4c8d-aa12-e5d61a29082e" containerName="extract" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.311381 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bde562b-ba15-48c3-a959-bb7a2efb1ad1" containerName="extract" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.312086 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.314421 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-62q9v" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.314739 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-service-cert" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.331623 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55"] Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.456583 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9e3d8607-7623-4fa8-bc89-d39fb3e438a1-apiservice-cert\") pod \"swift-operator-controller-manager-c4c6f6d75-kgf55\" (UID: \"9e3d8607-7623-4fa8-bc89-d39fb3e438a1\") " pod="openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.456691 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcg7s\" (UniqueName: \"kubernetes.io/projected/9e3d8607-7623-4fa8-bc89-d39fb3e438a1-kube-api-access-lcg7s\") pod \"swift-operator-controller-manager-c4c6f6d75-kgf55\" (UID: \"9e3d8607-7623-4fa8-bc89-d39fb3e438a1\") " pod="openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.456796 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9e3d8607-7623-4fa8-bc89-d39fb3e438a1-webhook-cert\") pod \"swift-operator-controller-manager-c4c6f6d75-kgf55\" (UID: \"9e3d8607-7623-4fa8-bc89-d39fb3e438a1\") " pod="openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.557859 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9e3d8607-7623-4fa8-bc89-d39fb3e438a1-apiservice-cert\") pod \"swift-operator-controller-manager-c4c6f6d75-kgf55\" (UID: \"9e3d8607-7623-4fa8-bc89-d39fb3e438a1\") " pod="openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.557923 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcg7s\" (UniqueName: \"kubernetes.io/projected/9e3d8607-7623-4fa8-bc89-d39fb3e438a1-kube-api-access-lcg7s\") pod \"swift-operator-controller-manager-c4c6f6d75-kgf55\" (UID: \"9e3d8607-7623-4fa8-bc89-d39fb3e438a1\") " pod="openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.558002 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9e3d8607-7623-4fa8-bc89-d39fb3e438a1-webhook-cert\") pod \"swift-operator-controller-manager-c4c6f6d75-kgf55\" (UID: \"9e3d8607-7623-4fa8-bc89-d39fb3e438a1\") " pod="openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.565322 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9e3d8607-7623-4fa8-bc89-d39fb3e438a1-apiservice-cert\") pod \"swift-operator-controller-manager-c4c6f6d75-kgf55\" (UID: \"9e3d8607-7623-4fa8-bc89-d39fb3e438a1\") " pod="openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.568569 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9e3d8607-7623-4fa8-bc89-d39fb3e438a1-webhook-cert\") pod \"swift-operator-controller-manager-c4c6f6d75-kgf55\" (UID: \"9e3d8607-7623-4fa8-bc89-d39fb3e438a1\") " pod="openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.578747 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcg7s\" (UniqueName: \"kubernetes.io/projected/9e3d8607-7623-4fa8-bc89-d39fb3e438a1-kube-api-access-lcg7s\") pod \"swift-operator-controller-manager-c4c6f6d75-kgf55\" (UID: \"9e3d8607-7623-4fa8-bc89-d39fb3e438a1\") " pod="openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.630896 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55" Nov 24 08:51:45 crc kubenswrapper[4718]: I1124 08:51:45.907434 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55"] Nov 24 08:51:46 crc kubenswrapper[4718]: I1124 08:51:46.216384 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55" event={"ID":"9e3d8607-7623-4fa8-bc89-d39fb3e438a1","Type":"ContainerStarted","Data":"ef04e4f87d7ddd9ceea60e76e761d7aa3689df33cf4e842c4462be67e454c7fd"} Nov 24 08:51:49 crc kubenswrapper[4718]: I1124 08:51:49.240043 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55" event={"ID":"9e3d8607-7623-4fa8-bc89-d39fb3e438a1","Type":"ContainerStarted","Data":"9929e953934966001ae88099680155c1ab18ec2e0ea366a272d6108700fa88c1"} Nov 24 08:51:49 crc kubenswrapper[4718]: I1124 08:51:49.241020 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55" Nov 24 08:51:49 crc kubenswrapper[4718]: I1124 08:51:49.241036 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55" event={"ID":"9e3d8607-7623-4fa8-bc89-d39fb3e438a1","Type":"ContainerStarted","Data":"a7ac39230181c8b0c83681ebcdd780858e3ea642c205126a5a065310053e4c5f"} Nov 24 08:51:49 crc kubenswrapper[4718]: I1124 08:51:49.267144 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55" podStartSLOduration=1.772148263 podStartE2EDuration="4.267115818s" podCreationTimestamp="2025-11-24 08:51:45 +0000 UTC" firstStartedPulling="2025-11-24 08:51:45.912313983 +0000 UTC m=+978.028604887" lastFinishedPulling="2025-11-24 08:51:48.407281538 +0000 UTC m=+980.523572442" observedRunningTime="2025-11-24 08:51:49.260663251 +0000 UTC m=+981.376954155" watchObservedRunningTime="2025-11-24 08:51:49.267115818 +0000 UTC m=+981.383406722" Nov 24 08:51:51 crc kubenswrapper[4718]: I1124 08:51:51.529987 4718 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf"] Nov 24 08:51:51 crc kubenswrapper[4718]: I1124 08:51:51.532536 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf" Nov 24 08:51:51 crc kubenswrapper[4718]: I1124 08:51:51.535539 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-service-cert" Nov 24 08:51:51 crc kubenswrapper[4718]: I1124 08:51:51.535721 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-4qsjt" Nov 24 08:51:51 crc kubenswrapper[4718]: I1124 08:51:51.541660 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf"] Nov 24 08:51:51 crc kubenswrapper[4718]: I1124 08:51:51.652465 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5114618f-d8e8-4006-8e3b-4c13e4aa9748-webhook-cert\") pod \"horizon-operator-controller-manager-648556d4d5-vv9mf\" (UID: \"5114618f-d8e8-4006-8e3b-4c13e4aa9748\") " pod="openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf" Nov 24 08:51:51 crc kubenswrapper[4718]: I1124 08:51:51.652522 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5114618f-d8e8-4006-8e3b-4c13e4aa9748-apiservice-cert\") pod \"horizon-operator-controller-manager-648556d4d5-vv9mf\" (UID: \"5114618f-d8e8-4006-8e3b-4c13e4aa9748\") " pod="openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf" Nov 24 08:51:51 crc kubenswrapper[4718]: I1124 08:51:51.652639 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htq2l\" (UniqueName: \"kubernetes.io/projected/5114618f-d8e8-4006-8e3b-4c13e4aa9748-kube-api-access-htq2l\") pod \"horizon-operator-controller-manager-648556d4d5-vv9mf\" (UID: \"5114618f-d8e8-4006-8e3b-4c13e4aa9748\") " pod="openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf" Nov 24 08:51:51 crc kubenswrapper[4718]: I1124 08:51:51.754184 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htq2l\" (UniqueName: \"kubernetes.io/projected/5114618f-d8e8-4006-8e3b-4c13e4aa9748-kube-api-access-htq2l\") pod \"horizon-operator-controller-manager-648556d4d5-vv9mf\" (UID: \"5114618f-d8e8-4006-8e3b-4c13e4aa9748\") " pod="openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf" Nov 24 08:51:51 crc kubenswrapper[4718]: I1124 08:51:51.754275 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5114618f-d8e8-4006-8e3b-4c13e4aa9748-webhook-cert\") pod \"horizon-operator-controller-manager-648556d4d5-vv9mf\" (UID: \"5114618f-d8e8-4006-8e3b-4c13e4aa9748\") " pod="openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf" Nov 24 08:51:51 crc kubenswrapper[4718]: I1124 08:51:51.754311 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5114618f-d8e8-4006-8e3b-4c13e4aa9748-apiservice-cert\") pod 
\"horizon-operator-controller-manager-648556d4d5-vv9mf\" (UID: \"5114618f-d8e8-4006-8e3b-4c13e4aa9748\") " pod="openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf" Nov 24 08:51:51 crc kubenswrapper[4718]: I1124 08:51:51.760400 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5114618f-d8e8-4006-8e3b-4c13e4aa9748-webhook-cert\") pod \"horizon-operator-controller-manager-648556d4d5-vv9mf\" (UID: \"5114618f-d8e8-4006-8e3b-4c13e4aa9748\") " pod="openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf" Nov 24 08:51:51 crc kubenswrapper[4718]: I1124 08:51:51.766715 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5114618f-d8e8-4006-8e3b-4c13e4aa9748-apiservice-cert\") pod \"horizon-operator-controller-manager-648556d4d5-vv9mf\" (UID: \"5114618f-d8e8-4006-8e3b-4c13e4aa9748\") " pod="openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf" Nov 24 08:51:51 crc kubenswrapper[4718]: I1124 08:51:51.770279 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htq2l\" (UniqueName: \"kubernetes.io/projected/5114618f-d8e8-4006-8e3b-4c13e4aa9748-kube-api-access-htq2l\") pod \"horizon-operator-controller-manager-648556d4d5-vv9mf\" (UID: \"5114618f-d8e8-4006-8e3b-4c13e4aa9748\") " pod="openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf" Nov 24 08:51:51 crc kubenswrapper[4718]: I1124 08:51:51.854286 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf" Nov 24 08:51:52 crc kubenswrapper[4718]: I1124 08:51:52.045130 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:51:52 crc kubenswrapper[4718]: I1124 08:51:52.045189 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:51:52 crc kubenswrapper[4718]: I1124 08:51:52.045232 4718 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:51:52 crc kubenswrapper[4718]: I1124 08:51:52.045864 4718 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1a5f70c58a45eccf71c6de8475549daad92f17e19b44d32bf6a0b7edbca6ed9f"} pod="openshift-machine-config-operator/machine-config-daemon-575gl" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 08:51:52 crc kubenswrapper[4718]: I1124 08:51:52.045925 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" containerID="cri-o://1a5f70c58a45eccf71c6de8475549daad92f17e19b44d32bf6a0b7edbca6ed9f" gracePeriod=600 Nov 24 08:51:52 crc kubenswrapper[4718]: I1124 
08:51:52.262133 4718 generic.go:334] "Generic (PLEG): container finished" podID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerID="1a5f70c58a45eccf71c6de8475549daad92f17e19b44d32bf6a0b7edbca6ed9f" exitCode=0 Nov 24 08:51:52 crc kubenswrapper[4718]: I1124 08:51:52.262215 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerDied","Data":"1a5f70c58a45eccf71c6de8475549daad92f17e19b44d32bf6a0b7edbca6ed9f"} Nov 24 08:51:52 crc kubenswrapper[4718]: I1124 08:51:52.262717 4718 scope.go:117] "RemoveContainer" containerID="617141ef091b02db6eb1b54328e03850e47f6f2d095a10a3726c1cd67c78f520" Nov 24 08:51:52 crc kubenswrapper[4718]: I1124 08:51:52.313194 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf"] Nov 24 08:51:52 crc kubenswrapper[4718]: W1124 08:51:52.318252 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5114618f_d8e8_4006_8e3b_4c13e4aa9748.slice/crio-547af693b0b19c05826f2cadb1e73b69af70de6208807fdaa4945a1061c45b37 WatchSource:0}: Error finding container 547af693b0b19c05826f2cadb1e73b69af70de6208807fdaa4945a1061c45b37: Status 404 returned error can't find the container with id 547af693b0b19c05826f2cadb1e73b69af70de6208807fdaa4945a1061c45b37 Nov 24 08:51:53 crc kubenswrapper[4718]: I1124 08:51:53.272153 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerStarted","Data":"aa05c9aab5bce7122c842c494e5738a78a924285d3cae2d6dd4b40d0c97d9b86"} Nov 24 08:51:53 crc kubenswrapper[4718]: I1124 08:51:53.273872 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf" event={"ID":"5114618f-d8e8-4006-8e3b-4c13e4aa9748","Type":"ContainerStarted","Data":"547af693b0b19c05826f2cadb1e73b69af70de6208807fdaa4945a1061c45b37"} Nov 24 08:51:55 crc kubenswrapper[4718]: I1124 08:51:55.635614 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-c4c6f6d75-kgf55" Nov 24 08:51:56 crc kubenswrapper[4718]: I1124 08:51:56.297658 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf" event={"ID":"5114618f-d8e8-4006-8e3b-4c13e4aa9748","Type":"ContainerStarted","Data":"5a7a4d35bd763b7bb3fc9e146937ff0f627de2cd9af12e178561f95ccfbdbb3e"} Nov 24 08:51:56 crc kubenswrapper[4718]: I1124 08:51:56.297704 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf" event={"ID":"5114618f-d8e8-4006-8e3b-4c13e4aa9748","Type":"ContainerStarted","Data":"19e14fc68bd182f51f83036a6069df16093f5155b4e9efc8d76b38f7c1511bfc"} Nov 24 08:51:56 crc kubenswrapper[4718]: I1124 08:51:56.297816 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf" Nov 24 08:51:56 crc kubenswrapper[4718]: I1124 08:51:56.312529 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf" podStartSLOduration=2.336211959 podStartE2EDuration="5.312508852s" 
podCreationTimestamp="2025-11-24 08:51:51 +0000 UTC" firstStartedPulling="2025-11-24 08:51:52.320757934 +0000 UTC m=+984.437048838" lastFinishedPulling="2025-11-24 08:51:55.297054827 +0000 UTC m=+987.413345731" observedRunningTime="2025-11-24 08:51:56.311706881 +0000 UTC m=+988.427997795" watchObservedRunningTime="2025-11-24 08:51:56.312508852 +0000 UTC m=+988.428799766" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.042489 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/swift-storage-0"] Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.047396 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.050261 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"swift-conf" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.050930 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"swift-swift-dockercfg-ftxwt" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.051321 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"swift-storage-config-data" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.051333 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"swift-ring-files" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.078992 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-storage-0"] Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.148374 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.148438 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kt2hs\" (UniqueName: \"kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-kube-api-access-kt2hs\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.148478 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/883e6594-9051-4dae-85fc-5f7d8bf60bab-lock\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.148504 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.148529 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/883e6594-9051-4dae-85fc-5f7d8bf60bab-cache\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.249305 4718 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.249356 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/883e6594-9051-4dae-85fc-5f7d8bf60bab-cache\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.249420 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.249487 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kt2hs\" (UniqueName: \"kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-kube-api-access-kt2hs\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.249524 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/883e6594-9051-4dae-85fc-5f7d8bf60bab-lock\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.249716 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") device mount path \"/mnt/openstack/pv02\"" pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:58 crc kubenswrapper[4718]: E1124 08:51:58.249956 4718 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Nov 24 08:51:58 crc kubenswrapper[4718]: E1124 08:51:58.250002 4718 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.250015 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/883e6594-9051-4dae-85fc-5f7d8bf60bab-cache\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:58 crc kubenswrapper[4718]: E1124 08:51:58.250068 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift podName:883e6594-9051-4dae-85fc-5f7d8bf60bab nodeName:}" failed. No retries permitted until 2025-11-24 08:51:58.750047111 +0000 UTC m=+990.866338025 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift") pod "swift-storage-0" (UID: "883e6594-9051-4dae-85fc-5f7d8bf60bab") : configmap "swift-ring-files" not found Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.250069 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/883e6594-9051-4dae-85fc-5f7d8bf60bab-lock\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.282205 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.282958 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kt2hs\" (UniqueName: \"kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-kube-api-access-kt2hs\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.653309 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/swift-ring-rebalance-jmbxx"] Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.654644 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.657446 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"swift-proxy-config-data" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.657674 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"swift-ring-config-data" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.657787 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"swift-ring-scripts" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.671623 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-ring-rebalance-jmbxx"] Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.755656 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/7682a690-8f1e-435b-b29d-5d6e8c60676b-ring-data-devices\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.755722 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.755748 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwkkj\" (UniqueName: \"kubernetes.io/projected/7682a690-8f1e-435b-b29d-5d6e8c60676b-kube-api-access-rwkkj\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " 
pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.755768 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7682a690-8f1e-435b-b29d-5d6e8c60676b-scripts\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.755785 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/7682a690-8f1e-435b-b29d-5d6e8c60676b-dispersionconf\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.755806 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/7682a690-8f1e-435b-b29d-5d6e8c60676b-etc-swift\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.755833 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/7682a690-8f1e-435b-b29d-5d6e8c60676b-swiftconf\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: E1124 08:51:58.755883 4718 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Nov 24 08:51:58 crc kubenswrapper[4718]: E1124 08:51:58.755908 4718 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Nov 24 08:51:58 crc kubenswrapper[4718]: E1124 08:51:58.755954 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift podName:883e6594-9051-4dae-85fc-5f7d8bf60bab nodeName:}" failed. No retries permitted until 2025-11-24 08:51:59.755935969 +0000 UTC m=+991.872226873 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift") pod "swift-storage-0" (UID: "883e6594-9051-4dae-85fc-5f7d8bf60bab") : configmap "swift-ring-files" not found Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.859342 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwkkj\" (UniqueName: \"kubernetes.io/projected/7682a690-8f1e-435b-b29d-5d6e8c60676b-kube-api-access-rwkkj\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.859405 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7682a690-8f1e-435b-b29d-5d6e8c60676b-scripts\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.859429 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/7682a690-8f1e-435b-b29d-5d6e8c60676b-dispersionconf\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.859453 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/7682a690-8f1e-435b-b29d-5d6e8c60676b-etc-swift\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.859495 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/7682a690-8f1e-435b-b29d-5d6e8c60676b-swiftconf\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.859574 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/7682a690-8f1e-435b-b29d-5d6e8c60676b-ring-data-devices\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.860780 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/7682a690-8f1e-435b-b29d-5d6e8c60676b-ring-data-devices\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.861966 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/7682a690-8f1e-435b-b29d-5d6e8c60676b-etc-swift\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.862279 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/7682a690-8f1e-435b-b29d-5d6e8c60676b-scripts\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.867761 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/7682a690-8f1e-435b-b29d-5d6e8c60676b-dispersionconf\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.878547 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/7682a690-8f1e-435b-b29d-5d6e8c60676b-swiftconf\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.904734 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwkkj\" (UniqueName: \"kubernetes.io/projected/7682a690-8f1e-435b-b29d-5d6e8c60676b-kube-api-access-rwkkj\") pod \"swift-ring-rebalance-jmbxx\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:58 crc kubenswrapper[4718]: I1124 08:51:58.974835 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:51:59 crc kubenswrapper[4718]: I1124 08:51:59.361471 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-ring-rebalance-jmbxx"] Nov 24 08:51:59 crc kubenswrapper[4718]: W1124 08:51:59.366514 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7682a690_8f1e_435b_b29d_5d6e8c60676b.slice/crio-b98f4633fc426108154755ed9dbfbc9cbaf33c2efa7c4938b640a4c701966078 WatchSource:0}: Error finding container b98f4633fc426108154755ed9dbfbc9cbaf33c2efa7c4938b640a4c701966078: Status 404 returned error can't find the container with id b98f4633fc426108154755ed9dbfbc9cbaf33c2efa7c4938b640a4c701966078 Nov 24 08:51:59 crc kubenswrapper[4718]: I1124 08:51:59.768836 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-index-lmfm8"] Nov 24 08:51:59 crc kubenswrapper[4718]: I1124 08:51:59.769760 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-index-lmfm8" Nov 24 08:51:59 crc kubenswrapper[4718]: I1124 08:51:59.772861 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:51:59 crc kubenswrapper[4718]: E1124 08:51:59.773079 4718 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Nov 24 08:51:59 crc kubenswrapper[4718]: E1124 08:51:59.773098 4718 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Nov 24 08:51:59 crc kubenswrapper[4718]: E1124 08:51:59.773150 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift podName:883e6594-9051-4dae-85fc-5f7d8bf60bab nodeName:}" failed. No retries permitted until 2025-11-24 08:52:01.773131182 +0000 UTC m=+993.889422086 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift") pod "swift-storage-0" (UID: "883e6594-9051-4dae-85fc-5f7d8bf60bab") : configmap "swift-ring-files" not found Nov 24 08:51:59 crc kubenswrapper[4718]: I1124 08:51:59.773724 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-index-dockercfg-wvcmp" Nov 24 08:51:59 crc kubenswrapper[4718]: I1124 08:51:59.780991 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-index-lmfm8"] Nov 24 08:51:59 crc kubenswrapper[4718]: I1124 08:51:59.874740 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzhrf\" (UniqueName: \"kubernetes.io/projected/d2bb363b-e3a7-4a05-a44a-62bbf3b7c879-kube-api-access-pzhrf\") pod \"glance-operator-index-lmfm8\" (UID: \"d2bb363b-e3a7-4a05-a44a-62bbf3b7c879\") " pod="openstack-operators/glance-operator-index-lmfm8" Nov 24 08:51:59 crc kubenswrapper[4718]: I1124 08:51:59.975774 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzhrf\" (UniqueName: \"kubernetes.io/projected/d2bb363b-e3a7-4a05-a44a-62bbf3b7c879-kube-api-access-pzhrf\") pod \"glance-operator-index-lmfm8\" (UID: \"d2bb363b-e3a7-4a05-a44a-62bbf3b7c879\") " pod="openstack-operators/glance-operator-index-lmfm8" Nov 24 08:51:59 crc kubenswrapper[4718]: I1124 08:51:59.997423 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzhrf\" (UniqueName: \"kubernetes.io/projected/d2bb363b-e3a7-4a05-a44a-62bbf3b7c879-kube-api-access-pzhrf\") pod \"glance-operator-index-lmfm8\" (UID: \"d2bb363b-e3a7-4a05-a44a-62bbf3b7c879\") " pod="openstack-operators/glance-operator-index-lmfm8" Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.088701 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-index-lmfm8" Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.328848 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" event={"ID":"7682a690-8f1e-435b-b29d-5d6e8c60676b","Type":"ContainerStarted","Data":"b98f4633fc426108154755ed9dbfbc9cbaf33c2efa7c4938b640a4c701966078"} Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.416526 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/swift-proxy-547856594f-nf4j4"] Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.417714 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.429084 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-proxy-547856594f-nf4j4"] Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.485221 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62d23e96-0761-4def-909e-dd0027504b8e-log-httpd\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.485298 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62d23e96-0761-4def-909e-dd0027504b8e-config-data\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.485355 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc2v5\" (UniqueName: \"kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-kube-api-access-fc2v5\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.485385 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.485406 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62d23e96-0761-4def-909e-dd0027504b8e-run-httpd\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.553009 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-index-lmfm8"] Nov 24 08:52:00 crc kubenswrapper[4718]: W1124 08:52:00.556690 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd2bb363b_e3a7_4a05_a44a_62bbf3b7c879.slice/crio-2528a47fa5b514bf29b9f78c254bc6be0e9eb0d4972de7f9c23dd031e2f8017c WatchSource:0}: Error finding container 
2528a47fa5b514bf29b9f78c254bc6be0e9eb0d4972de7f9c23dd031e2f8017c: Status 404 returned error can't find the container with id 2528a47fa5b514bf29b9f78c254bc6be0e9eb0d4972de7f9c23dd031e2f8017c Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.611526 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62d23e96-0761-4def-909e-dd0027504b8e-config-data\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.611580 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc2v5\" (UniqueName: \"kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-kube-api-access-fc2v5\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.611609 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.611626 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62d23e96-0761-4def-909e-dd0027504b8e-run-httpd\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.611707 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62d23e96-0761-4def-909e-dd0027504b8e-log-httpd\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:00 crc kubenswrapper[4718]: E1124 08:52:00.611949 4718 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Nov 24 08:52:00 crc kubenswrapper[4718]: E1124 08:52:00.611980 4718 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-proxy-547856594f-nf4j4: configmap "swift-ring-files" not found Nov 24 08:52:00 crc kubenswrapper[4718]: E1124 08:52:00.612056 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift podName:62d23e96-0761-4def-909e-dd0027504b8e nodeName:}" failed. No retries permitted until 2025-11-24 08:52:01.112035409 +0000 UTC m=+993.228326373 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift") pod "swift-proxy-547856594f-nf4j4" (UID: "62d23e96-0761-4def-909e-dd0027504b8e") : configmap "swift-ring-files" not found Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.612514 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62d23e96-0761-4def-909e-dd0027504b8e-log-httpd\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.612737 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62d23e96-0761-4def-909e-dd0027504b8e-run-httpd\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.618020 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62d23e96-0761-4def-909e-dd0027504b8e-config-data\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:00 crc kubenswrapper[4718]: I1124 08:52:00.648879 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc2v5\" (UniqueName: \"kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-kube-api-access-fc2v5\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:01 crc kubenswrapper[4718]: I1124 08:52:01.118234 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:01 crc kubenswrapper[4718]: E1124 08:52:01.118473 4718 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Nov 24 08:52:01 crc kubenswrapper[4718]: E1124 08:52:01.118491 4718 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-proxy-547856594f-nf4j4: configmap "swift-ring-files" not found Nov 24 08:52:01 crc kubenswrapper[4718]: E1124 08:52:01.118542 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift podName:62d23e96-0761-4def-909e-dd0027504b8e nodeName:}" failed. No retries permitted until 2025-11-24 08:52:02.118525043 +0000 UTC m=+994.234815947 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift") pod "swift-proxy-547856594f-nf4j4" (UID: "62d23e96-0761-4def-909e-dd0027504b8e") : configmap "swift-ring-files" not found Nov 24 08:52:01 crc kubenswrapper[4718]: I1124 08:52:01.337220 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-index-lmfm8" event={"ID":"d2bb363b-e3a7-4a05-a44a-62bbf3b7c879","Type":"ContainerStarted","Data":"2528a47fa5b514bf29b9f78c254bc6be0e9eb0d4972de7f9c23dd031e2f8017c"} Nov 24 08:52:01 crc kubenswrapper[4718]: I1124 08:52:01.831606 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:52:01 crc kubenswrapper[4718]: E1124 08:52:01.831876 4718 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Nov 24 08:52:01 crc kubenswrapper[4718]: E1124 08:52:01.832248 4718 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Nov 24 08:52:01 crc kubenswrapper[4718]: E1124 08:52:01.832319 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift podName:883e6594-9051-4dae-85fc-5f7d8bf60bab nodeName:}" failed. No retries permitted until 2025-11-24 08:52:05.832295717 +0000 UTC m=+997.948586621 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift") pod "swift-storage-0" (UID: "883e6594-9051-4dae-85fc-5f7d8bf60bab") : configmap "swift-ring-files" not found Nov 24 08:52:01 crc kubenswrapper[4718]: I1124 08:52:01.860699 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-648556d4d5-vv9mf" Nov 24 08:52:02 crc kubenswrapper[4718]: I1124 08:52:02.136418 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:02 crc kubenswrapper[4718]: E1124 08:52:02.136584 4718 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Nov 24 08:52:02 crc kubenswrapper[4718]: E1124 08:52:02.136598 4718 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-proxy-547856594f-nf4j4: configmap "swift-ring-files" not found Nov 24 08:52:02 crc kubenswrapper[4718]: E1124 08:52:02.136648 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift podName:62d23e96-0761-4def-909e-dd0027504b8e nodeName:}" failed. No retries permitted until 2025-11-24 08:52:04.136633612 +0000 UTC m=+996.252924516 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift") pod "swift-proxy-547856594f-nf4j4" (UID: "62d23e96-0761-4def-909e-dd0027504b8e") : configmap "swift-ring-files" not found Nov 24 08:52:04 crc kubenswrapper[4718]: I1124 08:52:04.178635 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:04 crc kubenswrapper[4718]: E1124 08:52:04.178856 4718 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Nov 24 08:52:04 crc kubenswrapper[4718]: E1124 08:52:04.179066 4718 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-proxy-547856594f-nf4j4: configmap "swift-ring-files" not found Nov 24 08:52:04 crc kubenswrapper[4718]: E1124 08:52:04.179173 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift podName:62d23e96-0761-4def-909e-dd0027504b8e nodeName:}" failed. No retries permitted until 2025-11-24 08:52:08.17910791 +0000 UTC m=+1000.295398814 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift") pod "swift-proxy-547856594f-nf4j4" (UID: "62d23e96-0761-4def-909e-dd0027504b8e") : configmap "swift-ring-files" not found Nov 24 08:52:05 crc kubenswrapper[4718]: I1124 08:52:05.911886 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:52:05 crc kubenswrapper[4718]: E1124 08:52:05.912062 4718 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Nov 24 08:52:05 crc kubenswrapper[4718]: E1124 08:52:05.912084 4718 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Nov 24 08:52:05 crc kubenswrapper[4718]: E1124 08:52:05.912138 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift podName:883e6594-9051-4dae-85fc-5f7d8bf60bab nodeName:}" failed. No retries permitted until 2025-11-24 08:52:13.912119734 +0000 UTC m=+1006.028410638 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift") pod "swift-storage-0" (UID: "883e6594-9051-4dae-85fc-5f7d8bf60bab") : configmap "swift-ring-files" not found Nov 24 08:52:08 crc kubenswrapper[4718]: I1124 08:52:08.239652 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:08 crc kubenswrapper[4718]: E1124 08:52:08.239887 4718 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Nov 24 08:52:08 crc kubenswrapper[4718]: E1124 08:52:08.240286 4718 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-proxy-547856594f-nf4j4: configmap "swift-ring-files" not found Nov 24 08:52:08 crc kubenswrapper[4718]: E1124 08:52:08.240351 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift podName:62d23e96-0761-4def-909e-dd0027504b8e nodeName:}" failed. No retries permitted until 2025-11-24 08:52:16.240330808 +0000 UTC m=+1008.356621712 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift") pod "swift-proxy-547856594f-nf4j4" (UID: "62d23e96-0761-4def-909e-dd0027504b8e") : configmap "swift-ring-files" not found Nov 24 08:52:08 crc kubenswrapper[4718]: I1124 08:52:08.424171 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" event={"ID":"7682a690-8f1e-435b-b29d-5d6e8c60676b","Type":"ContainerStarted","Data":"727209b92b93d40528fdc9ec261f165ecb73ccd2a22f5af50a8a42ad38d43ec3"} Nov 24 08:52:08 crc kubenswrapper[4718]: I1124 08:52:08.426176 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-index-lmfm8" event={"ID":"d2bb363b-e3a7-4a05-a44a-62bbf3b7c879","Type":"ContainerStarted","Data":"21e21a2389b7b4c7b1b84736fee55b2070002ce299f0c2537aa365e65f678e0d"} Nov 24 08:52:08 crc kubenswrapper[4718]: I1124 08:52:08.444834 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" podStartSLOduration=2.452419593 podStartE2EDuration="10.444817491s" podCreationTimestamp="2025-11-24 08:51:58 +0000 UTC" firstStartedPulling="2025-11-24 08:51:59.368649218 +0000 UTC m=+991.484940122" lastFinishedPulling="2025-11-24 08:52:07.361047126 +0000 UTC m=+999.477338020" observedRunningTime="2025-11-24 08:52:08.443723481 +0000 UTC m=+1000.560014385" watchObservedRunningTime="2025-11-24 08:52:08.444817491 +0000 UTC m=+1000.561108395" Nov 24 08:52:08 crc kubenswrapper[4718]: I1124 08:52:08.464303 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-index-lmfm8" podStartSLOduration=2.733427453 podStartE2EDuration="9.464285854s" podCreationTimestamp="2025-11-24 08:51:59 +0000 UTC" firstStartedPulling="2025-11-24 08:52:00.559921323 +0000 UTC m=+992.676212227" lastFinishedPulling="2025-11-24 08:52:07.290779724 +0000 UTC m=+999.407070628" observedRunningTime="2025-11-24 08:52:08.463595455 +0000 UTC m=+1000.579886359" 
watchObservedRunningTime="2025-11-24 08:52:08.464285854 +0000 UTC m=+1000.580576758" Nov 24 08:52:10 crc kubenswrapper[4718]: I1124 08:52:10.089883 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-index-lmfm8" Nov 24 08:52:10 crc kubenswrapper[4718]: I1124 08:52:10.090262 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/glance-operator-index-lmfm8" Nov 24 08:52:10 crc kubenswrapper[4718]: I1124 08:52:10.119347 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/glance-operator-index-lmfm8" Nov 24 08:52:13 crc kubenswrapper[4718]: I1124 08:52:13.919291 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:52:13 crc kubenswrapper[4718]: E1124 08:52:13.919574 4718 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Nov 24 08:52:13 crc kubenswrapper[4718]: E1124 08:52:13.920006 4718 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Nov 24 08:52:13 crc kubenswrapper[4718]: E1124 08:52:13.920078 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift podName:883e6594-9051-4dae-85fc-5f7d8bf60bab nodeName:}" failed. No retries permitted until 2025-11-24 08:52:29.920058836 +0000 UTC m=+1022.036349740 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift") pod "swift-storage-0" (UID: "883e6594-9051-4dae-85fc-5f7d8bf60bab") : configmap "swift-ring-files" not found Nov 24 08:52:14 crc kubenswrapper[4718]: I1124 08:52:14.464391 4718 generic.go:334] "Generic (PLEG): container finished" podID="7682a690-8f1e-435b-b29d-5d6e8c60676b" containerID="727209b92b93d40528fdc9ec261f165ecb73ccd2a22f5af50a8a42ad38d43ec3" exitCode=0 Nov 24 08:52:14 crc kubenswrapper[4718]: I1124 08:52:14.464431 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" event={"ID":"7682a690-8f1e-435b-b29d-5d6e8c60676b","Type":"ContainerDied","Data":"727209b92b93d40528fdc9ec261f165ecb73ccd2a22f5af50a8a42ad38d43ec3"} Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.738987 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.754591 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/7682a690-8f1e-435b-b29d-5d6e8c60676b-dispersionconf\") pod \"7682a690-8f1e-435b-b29d-5d6e8c60676b\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.754731 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/7682a690-8f1e-435b-b29d-5d6e8c60676b-etc-swift\") pod \"7682a690-8f1e-435b-b29d-5d6e8c60676b\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.754785 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwkkj\" (UniqueName: \"kubernetes.io/projected/7682a690-8f1e-435b-b29d-5d6e8c60676b-kube-api-access-rwkkj\") pod \"7682a690-8f1e-435b-b29d-5d6e8c60676b\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.754816 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7682a690-8f1e-435b-b29d-5d6e8c60676b-scripts\") pod \"7682a690-8f1e-435b-b29d-5d6e8c60676b\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.754870 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/7682a690-8f1e-435b-b29d-5d6e8c60676b-swiftconf\") pod \"7682a690-8f1e-435b-b29d-5d6e8c60676b\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.754904 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/7682a690-8f1e-435b-b29d-5d6e8c60676b-ring-data-devices\") pod \"7682a690-8f1e-435b-b29d-5d6e8c60676b\" (UID: \"7682a690-8f1e-435b-b29d-5d6e8c60676b\") " Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.756194 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7682a690-8f1e-435b-b29d-5d6e8c60676b-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "7682a690-8f1e-435b-b29d-5d6e8c60676b" (UID: "7682a690-8f1e-435b-b29d-5d6e8c60676b"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.756513 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7682a690-8f1e-435b-b29d-5d6e8c60676b-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "7682a690-8f1e-435b-b29d-5d6e8c60676b" (UID: "7682a690-8f1e-435b-b29d-5d6e8c60676b"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.768546 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7682a690-8f1e-435b-b29d-5d6e8c60676b-kube-api-access-rwkkj" (OuterVolumeSpecName: "kube-api-access-rwkkj") pod "7682a690-8f1e-435b-b29d-5d6e8c60676b" (UID: "7682a690-8f1e-435b-b29d-5d6e8c60676b"). InnerVolumeSpecName "kube-api-access-rwkkj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.775709 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7682a690-8f1e-435b-b29d-5d6e8c60676b-scripts" (OuterVolumeSpecName: "scripts") pod "7682a690-8f1e-435b-b29d-5d6e8c60676b" (UID: "7682a690-8f1e-435b-b29d-5d6e8c60676b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.784065 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7682a690-8f1e-435b-b29d-5d6e8c60676b-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "7682a690-8f1e-435b-b29d-5d6e8c60676b" (UID: "7682a690-8f1e-435b-b29d-5d6e8c60676b"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.787378 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7682a690-8f1e-435b-b29d-5d6e8c60676b-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "7682a690-8f1e-435b-b29d-5d6e8c60676b" (UID: "7682a690-8f1e-435b-b29d-5d6e8c60676b"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.856676 4718 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/7682a690-8f1e-435b-b29d-5d6e8c60676b-etc-swift\") on node \"crc\" DevicePath \"\"" Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.856728 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwkkj\" (UniqueName: \"kubernetes.io/projected/7682a690-8f1e-435b-b29d-5d6e8c60676b-kube-api-access-rwkkj\") on node \"crc\" DevicePath \"\"" Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.856746 4718 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7682a690-8f1e-435b-b29d-5d6e8c60676b-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.856757 4718 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/7682a690-8f1e-435b-b29d-5d6e8c60676b-swiftconf\") on node \"crc\" DevicePath \"\"" Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.856768 4718 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/7682a690-8f1e-435b-b29d-5d6e8c60676b-ring-data-devices\") on node \"crc\" DevicePath \"\"" Nov 24 08:52:15 crc kubenswrapper[4718]: I1124 08:52:15.856779 4718 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/7682a690-8f1e-435b-b29d-5d6e8c60676b-dispersionconf\") on node \"crc\" DevicePath \"\"" Nov 24 08:52:16 crc kubenswrapper[4718]: I1124 08:52:16.262112 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift\") pod \"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:16 crc kubenswrapper[4718]: I1124 08:52:16.267331 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/62d23e96-0761-4def-909e-dd0027504b8e-etc-swift\") pod 
\"swift-proxy-547856594f-nf4j4\" (UID: \"62d23e96-0761-4def-909e-dd0027504b8e\") " pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:16 crc kubenswrapper[4718]: I1124 08:52:16.335650 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:16 crc kubenswrapper[4718]: I1124 08:52:16.501873 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" event={"ID":"7682a690-8f1e-435b-b29d-5d6e8c60676b","Type":"ContainerDied","Data":"b98f4633fc426108154755ed9dbfbc9cbaf33c2efa7c4938b640a4c701966078"} Nov 24 08:52:16 crc kubenswrapper[4718]: I1124 08:52:16.501915 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b98f4633fc426108154755ed9dbfbc9cbaf33c2efa7c4938b640a4c701966078" Nov 24 08:52:16 crc kubenswrapper[4718]: I1124 08:52:16.501982 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/swift-ring-rebalance-jmbxx" Nov 24 08:52:16 crc kubenswrapper[4718]: I1124 08:52:16.672513 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-proxy-547856594f-nf4j4"] Nov 24 08:52:16 crc kubenswrapper[4718]: W1124 08:52:16.673351 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62d23e96_0761_4def_909e_dd0027504b8e.slice/crio-78d407f314f35d6c7059ea9be673ef4212833cf60ad376c388d9e34f8378b929 WatchSource:0}: Error finding container 78d407f314f35d6c7059ea9be673ef4212833cf60ad376c388d9e34f8378b929: Status 404 returned error can't find the container with id 78d407f314f35d6c7059ea9be673ef4212833cf60ad376c388d9e34f8378b929 Nov 24 08:52:17 crc kubenswrapper[4718]: I1124 08:52:17.511530 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" event={"ID":"62d23e96-0761-4def-909e-dd0027504b8e","Type":"ContainerStarted","Data":"2765c3145a6e91f662e59f1e86f0ab0d97fde2806b872c7b00f9b6c58a5de407"} Nov 24 08:52:17 crc kubenswrapper[4718]: I1124 08:52:17.511854 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:17 crc kubenswrapper[4718]: I1124 08:52:17.511866 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" event={"ID":"62d23e96-0761-4def-909e-dd0027504b8e","Type":"ContainerStarted","Data":"3fe071ebed6c869c5475cf6ba498c38db9ef55fe5e0c614d34fd0890f95bcb8b"} Nov 24 08:52:17 crc kubenswrapper[4718]: I1124 08:52:17.511876 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" event={"ID":"62d23e96-0761-4def-909e-dd0027504b8e","Type":"ContainerStarted","Data":"78d407f314f35d6c7059ea9be673ef4212833cf60ad376c388d9e34f8378b929"} Nov 24 08:52:17 crc kubenswrapper[4718]: I1124 08:52:17.536920 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" podStartSLOduration=17.536898827999998 podStartE2EDuration="17.536898828s" podCreationTimestamp="2025-11-24 08:52:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:52:17.535960512 +0000 UTC m=+1009.652251436" watchObservedRunningTime="2025-11-24 08:52:17.536898828 +0000 UTC m=+1009.653189732" Nov 24 08:52:18 crc 
kubenswrapper[4718]: I1124 08:52:18.517571 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:20 crc kubenswrapper[4718]: I1124 08:52:20.114051 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-index-lmfm8" Nov 24 08:52:21 crc kubenswrapper[4718]: I1124 08:52:21.341561 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:21 crc kubenswrapper[4718]: I1124 08:52:21.342461 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/swift-proxy-547856594f-nf4j4" Nov 24 08:52:29 crc kubenswrapper[4718]: I1124 08:52:29.973846 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:52:29 crc kubenswrapper[4718]: I1124 08:52:29.988510 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/883e6594-9051-4dae-85fc-5f7d8bf60bab-etc-swift\") pod \"swift-storage-0\" (UID: \"883e6594-9051-4dae-85fc-5f7d8bf60bab\") " pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:52:30 crc kubenswrapper[4718]: I1124 08:52:30.166746 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/swift-storage-0" Nov 24 08:52:30 crc kubenswrapper[4718]: I1124 08:52:30.594268 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-storage-0"] Nov 24 08:52:30 crc kubenswrapper[4718]: W1124 08:52:30.598923 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod883e6594_9051_4dae_85fc_5f7d8bf60bab.slice/crio-b1b150e1df523528490b43dbd7394b057a6a00e350711c0db05001353c8ca1a5 WatchSource:0}: Error finding container b1b150e1df523528490b43dbd7394b057a6a00e350711c0db05001353c8ca1a5: Status 404 returned error can't find the container with id b1b150e1df523528490b43dbd7394b057a6a00e350711c0db05001353c8ca1a5 Nov 24 08:52:31 crc kubenswrapper[4718]: I1124 08:52:31.612553 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"883e6594-9051-4dae-85fc-5f7d8bf60bab","Type":"ContainerStarted","Data":"b1b150e1df523528490b43dbd7394b057a6a00e350711c0db05001353c8ca1a5"} Nov 24 08:52:31 crc kubenswrapper[4718]: I1124 08:52:31.802551 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr"] Nov 24 08:52:31 crc kubenswrapper[4718]: E1124 08:52:31.802913 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7682a690-8f1e-435b-b29d-5d6e8c60676b" containerName="swift-ring-rebalance" Nov 24 08:52:31 crc kubenswrapper[4718]: I1124 08:52:31.802938 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="7682a690-8f1e-435b-b29d-5d6e8c60676b" containerName="swift-ring-rebalance" Nov 24 08:52:31 crc kubenswrapper[4718]: I1124 08:52:31.803137 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="7682a690-8f1e-435b-b29d-5d6e8c60676b" containerName="swift-ring-rebalance" Nov 24 08:52:31 crc kubenswrapper[4718]: I1124 08:52:31.804273 4718 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" Nov 24 08:52:31 crc kubenswrapper[4718]: I1124 08:52:31.811616 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-fq6vw" Nov 24 08:52:31 crc kubenswrapper[4718]: I1124 08:52:31.821540 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr"] Nov 24 08:52:31 crc kubenswrapper[4718]: I1124 08:52:31.904453 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/369e75e0-4d80-470a-8524-785ff06f217e-util\") pod \"40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr\" (UID: \"369e75e0-4d80-470a-8524-785ff06f217e\") " pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" Nov 24 08:52:31 crc kubenswrapper[4718]: I1124 08:52:31.904523 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/369e75e0-4d80-470a-8524-785ff06f217e-bundle\") pod \"40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr\" (UID: \"369e75e0-4d80-470a-8524-785ff06f217e\") " pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" Nov 24 08:52:31 crc kubenswrapper[4718]: I1124 08:52:31.904729 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5vrd\" (UniqueName: \"kubernetes.io/projected/369e75e0-4d80-470a-8524-785ff06f217e-kube-api-access-v5vrd\") pod \"40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr\" (UID: \"369e75e0-4d80-470a-8524-785ff06f217e\") " pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" Nov 24 08:52:32 crc kubenswrapper[4718]: I1124 08:52:32.006873 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/369e75e0-4d80-470a-8524-785ff06f217e-util\") pod \"40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr\" (UID: \"369e75e0-4d80-470a-8524-785ff06f217e\") " pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" Nov 24 08:52:32 crc kubenswrapper[4718]: I1124 08:52:32.006997 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/369e75e0-4d80-470a-8524-785ff06f217e-bundle\") pod \"40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr\" (UID: \"369e75e0-4d80-470a-8524-785ff06f217e\") " pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" Nov 24 08:52:32 crc kubenswrapper[4718]: I1124 08:52:32.007128 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5vrd\" (UniqueName: \"kubernetes.io/projected/369e75e0-4d80-470a-8524-785ff06f217e-kube-api-access-v5vrd\") pod \"40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr\" (UID: \"369e75e0-4d80-470a-8524-785ff06f217e\") " pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" Nov 24 08:52:32 crc kubenswrapper[4718]: I1124 08:52:32.007320 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/369e75e0-4d80-470a-8524-785ff06f217e-util\") pod \"40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr\" (UID: \"369e75e0-4d80-470a-8524-785ff06f217e\") " pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" Nov 24 08:52:32 crc kubenswrapper[4718]: I1124 08:52:32.007543 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/369e75e0-4d80-470a-8524-785ff06f217e-bundle\") pod \"40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr\" (UID: \"369e75e0-4d80-470a-8524-785ff06f217e\") " pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" Nov 24 08:52:32 crc kubenswrapper[4718]: I1124 08:52:32.035988 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5vrd\" (UniqueName: \"kubernetes.io/projected/369e75e0-4d80-470a-8524-785ff06f217e-kube-api-access-v5vrd\") pod \"40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr\" (UID: \"369e75e0-4d80-470a-8524-785ff06f217e\") " pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" Nov 24 08:52:32 crc kubenswrapper[4718]: I1124 08:52:32.133583 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" Nov 24 08:52:32 crc kubenswrapper[4718]: I1124 08:52:32.456677 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr"] Nov 24 08:52:32 crc kubenswrapper[4718]: W1124 08:52:32.465910 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod369e75e0_4d80_470a_8524_785ff06f217e.slice/crio-6f7a57358ef321529db4f665928691eece52a777b277b2e38514d38f9cbe1c7d WatchSource:0}: Error finding container 6f7a57358ef321529db4f665928691eece52a777b277b2e38514d38f9cbe1c7d: Status 404 returned error can't find the container with id 6f7a57358ef321529db4f665928691eece52a777b277b2e38514d38f9cbe1c7d Nov 24 08:52:32 crc kubenswrapper[4718]: I1124 08:52:32.619502 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"883e6594-9051-4dae-85fc-5f7d8bf60bab","Type":"ContainerStarted","Data":"a3076e86c78b960a50c04350e0fbcfb3df4d025c7431e7aec2a258c11fe2469f"} Nov 24 08:52:32 crc kubenswrapper[4718]: I1124 08:52:32.620740 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" event={"ID":"369e75e0-4d80-470a-8524-785ff06f217e","Type":"ContainerStarted","Data":"6f7a57358ef321529db4f665928691eece52a777b277b2e38514d38f9cbe1c7d"} Nov 24 08:52:33 crc kubenswrapper[4718]: I1124 08:52:33.630122 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"883e6594-9051-4dae-85fc-5f7d8bf60bab","Type":"ContainerStarted","Data":"e0abcff5d0b430b8906b35a7fffd536937b0d6425541feeb60836494534ce8ae"} Nov 24 08:52:33 crc kubenswrapper[4718]: I1124 08:52:33.630335 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"883e6594-9051-4dae-85fc-5f7d8bf60bab","Type":"ContainerStarted","Data":"0c65b66b3aa1b0827a20d4a9ac688a1615a2d91f60217447e24a9dbed20b08df"} Nov 24 08:52:33 crc kubenswrapper[4718]: I1124 08:52:33.630345 4718 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"883e6594-9051-4dae-85fc-5f7d8bf60bab","Type":"ContainerStarted","Data":"9a7b5f99dacbb3a1a534f18a84c6a25cd9e5d4761639c9670f5c15758a085611"} Nov 24 08:52:33 crc kubenswrapper[4718]: I1124 08:52:33.631481 4718 generic.go:334] "Generic (PLEG): container finished" podID="369e75e0-4d80-470a-8524-785ff06f217e" containerID="363eb4435f1f46ebe080a3b1f53acf76a0a30e3920c94d0f11af88bf1566c387" exitCode=0 Nov 24 08:52:33 crc kubenswrapper[4718]: I1124 08:52:33.631508 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" event={"ID":"369e75e0-4d80-470a-8524-785ff06f217e","Type":"ContainerDied","Data":"363eb4435f1f46ebe080a3b1f53acf76a0a30e3920c94d0f11af88bf1566c387"} Nov 24 08:52:34 crc kubenswrapper[4718]: I1124 08:52:34.641564 4718 generic.go:334] "Generic (PLEG): container finished" podID="369e75e0-4d80-470a-8524-785ff06f217e" containerID="4783eb8d0030441b6d7403fb9aa143d4cbfb121cf593639590a90d342638fe81" exitCode=0 Nov 24 08:52:34 crc kubenswrapper[4718]: I1124 08:52:34.641631 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" event={"ID":"369e75e0-4d80-470a-8524-785ff06f217e","Type":"ContainerDied","Data":"4783eb8d0030441b6d7403fb9aa143d4cbfb121cf593639590a90d342638fe81"} Nov 24 08:52:35 crc kubenswrapper[4718]: I1124 08:52:35.652296 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"883e6594-9051-4dae-85fc-5f7d8bf60bab","Type":"ContainerStarted","Data":"50951f0a7d06ee0c9d860e0fca5f224da75b57ed7f033208284b954775b3bd67"} Nov 24 08:52:35 crc kubenswrapper[4718]: I1124 08:52:35.652677 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"883e6594-9051-4dae-85fc-5f7d8bf60bab","Type":"ContainerStarted","Data":"eead6e9d0c646430cda4bf200740433d05caf7aa85b65dc665f2d5756a3b36b7"} Nov 24 08:52:35 crc kubenswrapper[4718]: I1124 08:52:35.652697 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"883e6594-9051-4dae-85fc-5f7d8bf60bab","Type":"ContainerStarted","Data":"a326b9db9254a65f903f0c477714a37271abe37825ce279adba0fc8b8859a076"} Nov 24 08:52:35 crc kubenswrapper[4718]: I1124 08:52:35.652710 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"883e6594-9051-4dae-85fc-5f7d8bf60bab","Type":"ContainerStarted","Data":"04f4db00a675789fbc78d2d4805b734a746b04b2a11d0cbfb7367471e8d6f6fb"} Nov 24 08:52:35 crc kubenswrapper[4718]: I1124 08:52:35.654553 4718 generic.go:334] "Generic (PLEG): container finished" podID="369e75e0-4d80-470a-8524-785ff06f217e" containerID="c93c62c81961be49f5b8e4efeb0a5fd0a4a46937e14f8eae95f671d37d6b5a6e" exitCode=0 Nov 24 08:52:35 crc kubenswrapper[4718]: I1124 08:52:35.654604 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" event={"ID":"369e75e0-4d80-470a-8524-785ff06f217e","Type":"ContainerDied","Data":"c93c62c81961be49f5b8e4efeb0a5fd0a4a46937e14f8eae95f671d37d6b5a6e"} Nov 24 08:52:36 crc kubenswrapper[4718]: I1124 08:52:36.896538 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" Nov 24 08:52:36 crc kubenswrapper[4718]: I1124 08:52:36.979999 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/369e75e0-4d80-470a-8524-785ff06f217e-util\") pod \"369e75e0-4d80-470a-8524-785ff06f217e\" (UID: \"369e75e0-4d80-470a-8524-785ff06f217e\") " Nov 24 08:52:36 crc kubenswrapper[4718]: I1124 08:52:36.980154 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5vrd\" (UniqueName: \"kubernetes.io/projected/369e75e0-4d80-470a-8524-785ff06f217e-kube-api-access-v5vrd\") pod \"369e75e0-4d80-470a-8524-785ff06f217e\" (UID: \"369e75e0-4d80-470a-8524-785ff06f217e\") " Nov 24 08:52:36 crc kubenswrapper[4718]: I1124 08:52:36.980183 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/369e75e0-4d80-470a-8524-785ff06f217e-bundle\") pod \"369e75e0-4d80-470a-8524-785ff06f217e\" (UID: \"369e75e0-4d80-470a-8524-785ff06f217e\") " Nov 24 08:52:36 crc kubenswrapper[4718]: I1124 08:52:36.981008 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/369e75e0-4d80-470a-8524-785ff06f217e-bundle" (OuterVolumeSpecName: "bundle") pod "369e75e0-4d80-470a-8524-785ff06f217e" (UID: "369e75e0-4d80-470a-8524-785ff06f217e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:52:36 crc kubenswrapper[4718]: I1124 08:52:36.986051 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/369e75e0-4d80-470a-8524-785ff06f217e-kube-api-access-v5vrd" (OuterVolumeSpecName: "kube-api-access-v5vrd") pod "369e75e0-4d80-470a-8524-785ff06f217e" (UID: "369e75e0-4d80-470a-8524-785ff06f217e"). InnerVolumeSpecName "kube-api-access-v5vrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:52:36 crc kubenswrapper[4718]: I1124 08:52:36.994423 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/369e75e0-4d80-470a-8524-785ff06f217e-util" (OuterVolumeSpecName: "util") pod "369e75e0-4d80-470a-8524-785ff06f217e" (UID: "369e75e0-4d80-470a-8524-785ff06f217e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:52:37 crc kubenswrapper[4718]: I1124 08:52:37.081467 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5vrd\" (UniqueName: \"kubernetes.io/projected/369e75e0-4d80-470a-8524-785ff06f217e-kube-api-access-v5vrd\") on node \"crc\" DevicePath \"\"" Nov 24 08:52:37 crc kubenswrapper[4718]: I1124 08:52:37.081818 4718 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/369e75e0-4d80-470a-8524-785ff06f217e-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:52:37 crc kubenswrapper[4718]: I1124 08:52:37.081830 4718 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/369e75e0-4d80-470a-8524-785ff06f217e-util\") on node \"crc\" DevicePath \"\"" Nov 24 08:52:37 crc kubenswrapper[4718]: I1124 08:52:37.669766 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" event={"ID":"369e75e0-4d80-470a-8524-785ff06f217e","Type":"ContainerDied","Data":"6f7a57358ef321529db4f665928691eece52a777b277b2e38514d38f9cbe1c7d"} Nov 24 08:52:37 crc kubenswrapper[4718]: I1124 08:52:37.669805 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f7a57358ef321529db4f665928691eece52a777b277b2e38514d38f9cbe1c7d" Nov 24 08:52:37 crc kubenswrapper[4718]: I1124 08:52:37.669866 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr" Nov 24 08:52:38 crc kubenswrapper[4718]: I1124 08:52:38.681371 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"883e6594-9051-4dae-85fc-5f7d8bf60bab","Type":"ContainerStarted","Data":"4e97a1b7ccfed870fb28e277ad654e88acdbb788f0ca53d473abb54df8d2162a"} Nov 24 08:52:38 crc kubenswrapper[4718]: I1124 08:52:38.681716 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"883e6594-9051-4dae-85fc-5f7d8bf60bab","Type":"ContainerStarted","Data":"b36cabda3ad596c95d94496bf2edd13001b3559c89adef221f841674c856d5f7"} Nov 24 08:52:38 crc kubenswrapper[4718]: I1124 08:52:38.681733 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"883e6594-9051-4dae-85fc-5f7d8bf60bab","Type":"ContainerStarted","Data":"b0d36c4501f8c2a76f6ca7ead9b118eae3d7fc5ad9f401e1f35910482baa9def"} Nov 24 08:52:38 crc kubenswrapper[4718]: I1124 08:52:38.681745 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"883e6594-9051-4dae-85fc-5f7d8bf60bab","Type":"ContainerStarted","Data":"6de8950752c7f61b23875377ef2d6b2ec47148ad95713304a04f81e86bf43b0e"} Nov 24 08:52:39 crc kubenswrapper[4718]: I1124 08:52:39.698165 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"883e6594-9051-4dae-85fc-5f7d8bf60bab","Type":"ContainerStarted","Data":"fd91781a5e6aab6159bb805c3711258983a271312c0052417a1323d575a86776"} Nov 24 08:52:39 crc kubenswrapper[4718]: I1124 08:52:39.698209 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"883e6594-9051-4dae-85fc-5f7d8bf60bab","Type":"ContainerStarted","Data":"cc755220413538d21ebfc105d886bfb5005e90a89c42ad4877b9f43dce154ab1"} Nov 24 08:52:39 crc kubenswrapper[4718]: 
I1124 08:52:39.698220 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"883e6594-9051-4dae-85fc-5f7d8bf60bab","Type":"ContainerStarted","Data":"d79d8135fa481ac746c8fb1a1a67f82615fa9877faed15f8063c508f05c4c1ed"} Nov 24 08:52:39 crc kubenswrapper[4718]: I1124 08:52:39.736890 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/swift-storage-0" podStartSLOduration=35.263575121 podStartE2EDuration="42.736871881s" podCreationTimestamp="2025-11-24 08:51:57 +0000 UTC" firstStartedPulling="2025-11-24 08:52:30.60116789 +0000 UTC m=+1022.717458794" lastFinishedPulling="2025-11-24 08:52:38.07446465 +0000 UTC m=+1030.190755554" observedRunningTime="2025-11-24 08:52:39.72585026 +0000 UTC m=+1031.842141154" watchObservedRunningTime="2025-11-24 08:52:39.736871881 +0000 UTC m=+1031.853162785" Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.718079 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns"] Nov 24 08:52:48 crc kubenswrapper[4718]: E1124 08:52:48.719022 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="369e75e0-4d80-470a-8524-785ff06f217e" containerName="util" Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.719041 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="369e75e0-4d80-470a-8524-785ff06f217e" containerName="util" Nov 24 08:52:48 crc kubenswrapper[4718]: E1124 08:52:48.719083 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="369e75e0-4d80-470a-8524-785ff06f217e" containerName="extract" Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.719091 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="369e75e0-4d80-470a-8524-785ff06f217e" containerName="extract" Nov 24 08:52:48 crc kubenswrapper[4718]: E1124 08:52:48.719104 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="369e75e0-4d80-470a-8524-785ff06f217e" containerName="pull" Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.719111 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="369e75e0-4d80-470a-8524-785ff06f217e" containerName="pull" Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.719286 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="369e75e0-4d80-470a-8524-785ff06f217e" containerName="extract" Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.720218 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns" Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.723769 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-6phqz" Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.732493 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns"] Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.733687 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-service-cert" Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.751196 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/632a00ec-d777-43a0-ac83-f3543055e722-webhook-cert\") pod \"glance-operator-controller-manager-7b4d7bbb6c-pllns\" (UID: \"632a00ec-d777-43a0-ac83-f3543055e722\") " pod="openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns" Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.751764 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/632a00ec-d777-43a0-ac83-f3543055e722-apiservice-cert\") pod \"glance-operator-controller-manager-7b4d7bbb6c-pllns\" (UID: \"632a00ec-d777-43a0-ac83-f3543055e722\") " pod="openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns" Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.751862 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gr84x\" (UniqueName: \"kubernetes.io/projected/632a00ec-d777-43a0-ac83-f3543055e722-kube-api-access-gr84x\") pod \"glance-operator-controller-manager-7b4d7bbb6c-pllns\" (UID: \"632a00ec-d777-43a0-ac83-f3543055e722\") " pod="openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns" Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.852738 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/632a00ec-d777-43a0-ac83-f3543055e722-webhook-cert\") pod \"glance-operator-controller-manager-7b4d7bbb6c-pllns\" (UID: \"632a00ec-d777-43a0-ac83-f3543055e722\") " pod="openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns" Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.852839 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/632a00ec-d777-43a0-ac83-f3543055e722-apiservice-cert\") pod \"glance-operator-controller-manager-7b4d7bbb6c-pllns\" (UID: \"632a00ec-d777-43a0-ac83-f3543055e722\") " pod="openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns" Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.852892 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gr84x\" (UniqueName: \"kubernetes.io/projected/632a00ec-d777-43a0-ac83-f3543055e722-kube-api-access-gr84x\") pod \"glance-operator-controller-manager-7b4d7bbb6c-pllns\" (UID: \"632a00ec-d777-43a0-ac83-f3543055e722\") " pod="openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns" Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.861360 4718 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/632a00ec-d777-43a0-ac83-f3543055e722-apiservice-cert\") pod \"glance-operator-controller-manager-7b4d7bbb6c-pllns\" (UID: \"632a00ec-d777-43a0-ac83-f3543055e722\") " pod="openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns" Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.869906 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gr84x\" (UniqueName: \"kubernetes.io/projected/632a00ec-d777-43a0-ac83-f3543055e722-kube-api-access-gr84x\") pod \"glance-operator-controller-manager-7b4d7bbb6c-pllns\" (UID: \"632a00ec-d777-43a0-ac83-f3543055e722\") " pod="openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns" Nov 24 08:52:48 crc kubenswrapper[4718]: I1124 08:52:48.872155 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/632a00ec-d777-43a0-ac83-f3543055e722-webhook-cert\") pod \"glance-operator-controller-manager-7b4d7bbb6c-pllns\" (UID: \"632a00ec-d777-43a0-ac83-f3543055e722\") " pod="openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns" Nov 24 08:52:49 crc kubenswrapper[4718]: I1124 08:52:49.052416 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns" Nov 24 08:52:49 crc kubenswrapper[4718]: I1124 08:52:49.469383 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns"] Nov 24 08:52:49 crc kubenswrapper[4718]: W1124 08:52:49.475155 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod632a00ec_d777_43a0_ac83_f3543055e722.slice/crio-65a62f6ae0bf0e2ce553a2e81fb3ccfd2a8841be62668abd0b4f23053ae27144 WatchSource:0}: Error finding container 65a62f6ae0bf0e2ce553a2e81fb3ccfd2a8841be62668abd0b4f23053ae27144: Status 404 returned error can't find the container with id 65a62f6ae0bf0e2ce553a2e81fb3ccfd2a8841be62668abd0b4f23053ae27144 Nov 24 08:52:49 crc kubenswrapper[4718]: I1124 08:52:49.765989 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns" event={"ID":"632a00ec-d777-43a0-ac83-f3543055e722","Type":"ContainerStarted","Data":"65a62f6ae0bf0e2ce553a2e81fb3ccfd2a8841be62668abd0b4f23053ae27144"} Nov 24 08:52:50 crc kubenswrapper[4718]: I1124 08:52:50.774488 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns" event={"ID":"632a00ec-d777-43a0-ac83-f3543055e722","Type":"ContainerStarted","Data":"dd869cb76f1094c5be37fcc56fd4fd6bc8d716a45582c528ea5c6ce61cd6d53a"} Nov 24 08:52:53 crc kubenswrapper[4718]: I1124 08:52:53.794492 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns" event={"ID":"632a00ec-d777-43a0-ac83-f3543055e722","Type":"ContainerStarted","Data":"3c80e1c6d48527fdd57612268ee0f34f10aa1002f8d6c923c8ae41a3b665e8db"} Nov 24 08:52:53 crc kubenswrapper[4718]: I1124 08:52:53.795361 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns" Nov 24 08:52:53 crc kubenswrapper[4718]: I1124 08:52:53.816618 4718 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns" podStartSLOduration=1.770575835 podStartE2EDuration="5.816597577s" podCreationTimestamp="2025-11-24 08:52:48 +0000 UTC" firstStartedPulling="2025-11-24 08:52:49.477428777 +0000 UTC m=+1041.593719681" lastFinishedPulling="2025-11-24 08:52:53.523450519 +0000 UTC m=+1045.639741423" observedRunningTime="2025-11-24 08:52:53.81082885 +0000 UTC m=+1045.927119754" watchObservedRunningTime="2025-11-24 08:52:53.816597577 +0000 UTC m=+1045.932888481" Nov 24 08:52:59 crc kubenswrapper[4718]: I1124 08:52:59.057074 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-7b4d7bbb6c-pllns" Nov 24 08:53:03 crc kubenswrapper[4718]: I1124 08:53:03.774904 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-create-xxr4t"] Nov 24 08:53:03 crc kubenswrapper[4718]: I1124 08:53:03.776527 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-xxr4t" Nov 24 08:53:03 crc kubenswrapper[4718]: I1124 08:53:03.782368 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-xxr4t"] Nov 24 08:53:03 crc kubenswrapper[4718]: I1124 08:53:03.887636 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbrnb\" (UniqueName: \"kubernetes.io/projected/9c853b5f-821e-4082-a3d4-3d914c6ef527-kube-api-access-rbrnb\") pod \"glance-db-create-xxr4t\" (UID: \"9c853b5f-821e-4082-a3d4-3d914c6ef527\") " pod="glance-kuttl-tests/glance-db-create-xxr4t" Nov 24 08:53:03 crc kubenswrapper[4718]: I1124 08:53:03.887743 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c853b5f-821e-4082-a3d4-3d914c6ef527-operator-scripts\") pod \"glance-db-create-xxr4t\" (UID: \"9c853b5f-821e-4082-a3d4-3d914c6ef527\") " pod="glance-kuttl-tests/glance-db-create-xxr4t" Nov 24 08:53:03 crc kubenswrapper[4718]: I1124 08:53:03.979605 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-aac4-account-create-update-plrtk"] Nov 24 08:53:03 crc kubenswrapper[4718]: I1124 08:53:03.980632 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-aac4-account-create-update-plrtk" Nov 24 08:53:03 crc kubenswrapper[4718]: I1124 08:53:03.982515 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-db-secret" Nov 24 08:53:03 crc kubenswrapper[4718]: I1124 08:53:03.988745 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c853b5f-821e-4082-a3d4-3d914c6ef527-operator-scripts\") pod \"glance-db-create-xxr4t\" (UID: \"9c853b5f-821e-4082-a3d4-3d914c6ef527\") " pod="glance-kuttl-tests/glance-db-create-xxr4t" Nov 24 08:53:03 crc kubenswrapper[4718]: I1124 08:53:03.989088 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbrnb\" (UniqueName: \"kubernetes.io/projected/9c853b5f-821e-4082-a3d4-3d914c6ef527-kube-api-access-rbrnb\") pod \"glance-db-create-xxr4t\" (UID: \"9c853b5f-821e-4082-a3d4-3d914c6ef527\") " pod="glance-kuttl-tests/glance-db-create-xxr4t" Nov 24 08:53:03 crc kubenswrapper[4718]: I1124 08:53:03.989140 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-aac4-account-create-update-plrtk"] Nov 24 08:53:03 crc kubenswrapper[4718]: I1124 08:53:03.990054 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c853b5f-821e-4082-a3d4-3d914c6ef527-operator-scripts\") pod \"glance-db-create-xxr4t\" (UID: \"9c853b5f-821e-4082-a3d4-3d914c6ef527\") " pod="glance-kuttl-tests/glance-db-create-xxr4t" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.012811 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbrnb\" (UniqueName: \"kubernetes.io/projected/9c853b5f-821e-4082-a3d4-3d914c6ef527-kube-api-access-rbrnb\") pod \"glance-db-create-xxr4t\" (UID: \"9c853b5f-821e-4082-a3d4-3d914c6ef527\") " pod="glance-kuttl-tests/glance-db-create-xxr4t" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.038686 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/openstackclient"] Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.039541 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/openstackclient" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.042071 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"openstack-config-secret" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.042221 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"default-dockercfg-d4cb2" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.042235 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-scripts-9db6gc427h" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.042319 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-config" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.054020 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstackclient"] Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.091221 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-276km\" (UniqueName: \"kubernetes.io/projected/a264ca86-f795-4ba2-945b-e6459a4d1e68-kube-api-access-276km\") pod \"openstackclient\" (UID: \"a264ca86-f795-4ba2-945b-e6459a4d1e68\") " pod="glance-kuttl-tests/openstackclient" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.091286 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-config\") pod \"openstackclient\" (UID: \"a264ca86-f795-4ba2-945b-e6459a4d1e68\") " pod="glance-kuttl-tests/openstackclient" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.091653 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-scripts\") pod \"openstackclient\" (UID: \"a264ca86-f795-4ba2-945b-e6459a4d1e68\") " pod="glance-kuttl-tests/openstackclient" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.091780 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjwq9\" (UniqueName: \"kubernetes.io/projected/485cca71-1560-4c07-b6eb-2139ec7c4e97-kube-api-access-kjwq9\") pod \"glance-aac4-account-create-update-plrtk\" (UID: \"485cca71-1560-4c07-b6eb-2139ec7c4e97\") " pod="glance-kuttl-tests/glance-aac4-account-create-update-plrtk" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.092075 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-config-secret\") pod \"openstackclient\" (UID: \"a264ca86-f795-4ba2-945b-e6459a4d1e68\") " pod="glance-kuttl-tests/openstackclient" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.092194 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/485cca71-1560-4c07-b6eb-2139ec7c4e97-operator-scripts\") pod \"glance-aac4-account-create-update-plrtk\" (UID: \"485cca71-1560-4c07-b6eb-2139ec7c4e97\") " pod="glance-kuttl-tests/glance-aac4-account-create-update-plrtk" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.094555 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-create-xxr4t" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.193678 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-config-secret\") pod \"openstackclient\" (UID: \"a264ca86-f795-4ba2-945b-e6459a4d1e68\") " pod="glance-kuttl-tests/openstackclient" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.195478 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/485cca71-1560-4c07-b6eb-2139ec7c4e97-operator-scripts\") pod \"glance-aac4-account-create-update-plrtk\" (UID: \"485cca71-1560-4c07-b6eb-2139ec7c4e97\") " pod="glance-kuttl-tests/glance-aac4-account-create-update-plrtk" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.195626 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-276km\" (UniqueName: \"kubernetes.io/projected/a264ca86-f795-4ba2-945b-e6459a4d1e68-kube-api-access-276km\") pod \"openstackclient\" (UID: \"a264ca86-f795-4ba2-945b-e6459a4d1e68\") " pod="glance-kuttl-tests/openstackclient" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.195693 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-config\") pod \"openstackclient\" (UID: \"a264ca86-f795-4ba2-945b-e6459a4d1e68\") " pod="glance-kuttl-tests/openstackclient" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.195859 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-scripts\") pod \"openstackclient\" (UID: \"a264ca86-f795-4ba2-945b-e6459a4d1e68\") " pod="glance-kuttl-tests/openstackclient" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.195924 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjwq9\" (UniqueName: \"kubernetes.io/projected/485cca71-1560-4c07-b6eb-2139ec7c4e97-kube-api-access-kjwq9\") pod \"glance-aac4-account-create-update-plrtk\" (UID: \"485cca71-1560-4c07-b6eb-2139ec7c4e97\") " pod="glance-kuttl-tests/glance-aac4-account-create-update-plrtk" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.197618 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/485cca71-1560-4c07-b6eb-2139ec7c4e97-operator-scripts\") pod \"glance-aac4-account-create-update-plrtk\" (UID: \"485cca71-1560-4c07-b6eb-2139ec7c4e97\") " pod="glance-kuttl-tests/glance-aac4-account-create-update-plrtk" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.197738 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-config\") pod \"openstackclient\" (UID: \"a264ca86-f795-4ba2-945b-e6459a4d1e68\") " pod="glance-kuttl-tests/openstackclient" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.197779 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-scripts\") pod \"openstackclient\" (UID: 
\"a264ca86-f795-4ba2-945b-e6459a4d1e68\") " pod="glance-kuttl-tests/openstackclient" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.212176 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-config-secret\") pod \"openstackclient\" (UID: \"a264ca86-f795-4ba2-945b-e6459a4d1e68\") " pod="glance-kuttl-tests/openstackclient" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.215904 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-276km\" (UniqueName: \"kubernetes.io/projected/a264ca86-f795-4ba2-945b-e6459a4d1e68-kube-api-access-276km\") pod \"openstackclient\" (UID: \"a264ca86-f795-4ba2-945b-e6459a4d1e68\") " pod="glance-kuttl-tests/openstackclient" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.217832 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjwq9\" (UniqueName: \"kubernetes.io/projected/485cca71-1560-4c07-b6eb-2139ec7c4e97-kube-api-access-kjwq9\") pod \"glance-aac4-account-create-update-plrtk\" (UID: \"485cca71-1560-4c07-b6eb-2139ec7c4e97\") " pod="glance-kuttl-tests/glance-aac4-account-create-update-plrtk" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.300567 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-aac4-account-create-update-plrtk" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.359116 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstackclient" Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.515564 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-xxr4t"] Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.624905 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstackclient"] Nov 24 08:53:04 crc kubenswrapper[4718]: W1124 08:53:04.630374 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda264ca86_f795_4ba2_945b_e6459a4d1e68.slice/crio-d21bfe1fb23b81f17ae3777b6ac7d57fe95e3fce7d3e06490c08ca5497b663a7 WatchSource:0}: Error finding container d21bfe1fb23b81f17ae3777b6ac7d57fe95e3fce7d3e06490c08ca5497b663a7: Status 404 returned error can't find the container with id d21bfe1fb23b81f17ae3777b6ac7d57fe95e3fce7d3e06490c08ca5497b663a7 Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.689713 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-aac4-account-create-update-plrtk"] Nov 24 08:53:04 crc kubenswrapper[4718]: W1124 08:53:04.695643 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod485cca71_1560_4c07_b6eb_2139ec7c4e97.slice/crio-88c355e0c7d23e47877bd8a335ac8a68bcf75a713d5cbbe5428c6594d4cb420c WatchSource:0}: Error finding container 88c355e0c7d23e47877bd8a335ac8a68bcf75a713d5cbbe5428c6594d4cb420c: Status 404 returned error can't find the container with id 88c355e0c7d23e47877bd8a335ac8a68bcf75a713d5cbbe5428c6594d4cb420c Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.869835 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstackclient" 
event={"ID":"a264ca86-f795-4ba2-945b-e6459a4d1e68","Type":"ContainerStarted","Data":"d21bfe1fb23b81f17ae3777b6ac7d57fe95e3fce7d3e06490c08ca5497b663a7"} Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.870693 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-aac4-account-create-update-plrtk" event={"ID":"485cca71-1560-4c07-b6eb-2139ec7c4e97","Type":"ContainerStarted","Data":"88c355e0c7d23e47877bd8a335ac8a68bcf75a713d5cbbe5428c6594d4cb420c"} Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.872007 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-xxr4t" event={"ID":"9c853b5f-821e-4082-a3d4-3d914c6ef527","Type":"ContainerStarted","Data":"abd276dcf11f3eb6dd7bbb9bc3b1b05bc15c6440b64eef5ed13117d57906df69"} Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.872041 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-xxr4t" event={"ID":"9c853b5f-821e-4082-a3d4-3d914c6ef527","Type":"ContainerStarted","Data":"855c3add2327df5ae2767dab38874643f28ed92d0983d8626256544413cf8f87"} Nov 24 08:53:04 crc kubenswrapper[4718]: I1124 08:53:04.889516 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-db-create-xxr4t" podStartSLOduration=1.8894953330000002 podStartE2EDuration="1.889495333s" podCreationTimestamp="2025-11-24 08:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:53:04.888077188 +0000 UTC m=+1057.004368102" watchObservedRunningTime="2025-11-24 08:53:04.889495333 +0000 UTC m=+1057.005786237" Nov 24 08:53:05 crc kubenswrapper[4718]: I1124 08:53:05.884753 4718 generic.go:334] "Generic (PLEG): container finished" podID="485cca71-1560-4c07-b6eb-2139ec7c4e97" containerID="0f45731e8cd593fda95477aa9f0262cb9e5bda6cb963b256065af6b3b8a81fff" exitCode=0 Nov 24 08:53:05 crc kubenswrapper[4718]: I1124 08:53:05.885096 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-aac4-account-create-update-plrtk" event={"ID":"485cca71-1560-4c07-b6eb-2139ec7c4e97","Type":"ContainerDied","Data":"0f45731e8cd593fda95477aa9f0262cb9e5bda6cb963b256065af6b3b8a81fff"} Nov 24 08:53:05 crc kubenswrapper[4718]: I1124 08:53:05.891662 4718 generic.go:334] "Generic (PLEG): container finished" podID="9c853b5f-821e-4082-a3d4-3d914c6ef527" containerID="abd276dcf11f3eb6dd7bbb9bc3b1b05bc15c6440b64eef5ed13117d57906df69" exitCode=0 Nov 24 08:53:05 crc kubenswrapper[4718]: I1124 08:53:05.891699 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-xxr4t" event={"ID":"9c853b5f-821e-4082-a3d4-3d914c6ef527","Type":"ContainerDied","Data":"abd276dcf11f3eb6dd7bbb9bc3b1b05bc15c6440b64eef5ed13117d57906df69"} Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.239240 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-aac4-account-create-update-plrtk" Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.344668 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-create-xxr4t" Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.394535 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/485cca71-1560-4c07-b6eb-2139ec7c4e97-operator-scripts\") pod \"485cca71-1560-4c07-b6eb-2139ec7c4e97\" (UID: \"485cca71-1560-4c07-b6eb-2139ec7c4e97\") " Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.394657 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjwq9\" (UniqueName: \"kubernetes.io/projected/485cca71-1560-4c07-b6eb-2139ec7c4e97-kube-api-access-kjwq9\") pod \"485cca71-1560-4c07-b6eb-2139ec7c4e97\" (UID: \"485cca71-1560-4c07-b6eb-2139ec7c4e97\") " Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.395414 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/485cca71-1560-4c07-b6eb-2139ec7c4e97-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "485cca71-1560-4c07-b6eb-2139ec7c4e97" (UID: "485cca71-1560-4c07-b6eb-2139ec7c4e97"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.403376 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/485cca71-1560-4c07-b6eb-2139ec7c4e97-kube-api-access-kjwq9" (OuterVolumeSpecName: "kube-api-access-kjwq9") pod "485cca71-1560-4c07-b6eb-2139ec7c4e97" (UID: "485cca71-1560-4c07-b6eb-2139ec7c4e97"). InnerVolumeSpecName "kube-api-access-kjwq9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.496115 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c853b5f-821e-4082-a3d4-3d914c6ef527-operator-scripts\") pod \"9c853b5f-821e-4082-a3d4-3d914c6ef527\" (UID: \"9c853b5f-821e-4082-a3d4-3d914c6ef527\") " Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.496178 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbrnb\" (UniqueName: \"kubernetes.io/projected/9c853b5f-821e-4082-a3d4-3d914c6ef527-kube-api-access-rbrnb\") pod \"9c853b5f-821e-4082-a3d4-3d914c6ef527\" (UID: \"9c853b5f-821e-4082-a3d4-3d914c6ef527\") " Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.496458 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjwq9\" (UniqueName: \"kubernetes.io/projected/485cca71-1560-4c07-b6eb-2139ec7c4e97-kube-api-access-kjwq9\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.496471 4718 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/485cca71-1560-4c07-b6eb-2139ec7c4e97-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.496759 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c853b5f-821e-4082-a3d4-3d914c6ef527-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9c853b5f-821e-4082-a3d4-3d914c6ef527" (UID: "9c853b5f-821e-4082-a3d4-3d914c6ef527"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.500006 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c853b5f-821e-4082-a3d4-3d914c6ef527-kube-api-access-rbrnb" (OuterVolumeSpecName: "kube-api-access-rbrnb") pod "9c853b5f-821e-4082-a3d4-3d914c6ef527" (UID: "9c853b5f-821e-4082-a3d4-3d914c6ef527"). InnerVolumeSpecName "kube-api-access-rbrnb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.598571 4718 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c853b5f-821e-4082-a3d4-3d914c6ef527-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.598607 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbrnb\" (UniqueName: \"kubernetes.io/projected/9c853b5f-821e-4082-a3d4-3d914c6ef527-kube-api-access-rbrnb\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.910744 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-aac4-account-create-update-plrtk" event={"ID":"485cca71-1560-4c07-b6eb-2139ec7c4e97","Type":"ContainerDied","Data":"88c355e0c7d23e47877bd8a335ac8a68bcf75a713d5cbbe5428c6594d4cb420c"} Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.910774 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-aac4-account-create-update-plrtk" Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.910803 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88c355e0c7d23e47877bd8a335ac8a68bcf75a713d5cbbe5428c6594d4cb420c" Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.913200 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-xxr4t" event={"ID":"9c853b5f-821e-4082-a3d4-3d914c6ef527","Type":"ContainerDied","Data":"855c3add2327df5ae2767dab38874643f28ed92d0983d8626256544413cf8f87"} Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.913242 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="855c3add2327df5ae2767dab38874643f28ed92d0983d8626256544413cf8f87" Nov 24 08:53:07 crc kubenswrapper[4718]: I1124 08:53:07.913265 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-create-xxr4t" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.130900 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-sync-tcnh5"] Nov 24 08:53:09 crc kubenswrapper[4718]: E1124 08:53:09.131583 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="485cca71-1560-4c07-b6eb-2139ec7c4e97" containerName="mariadb-account-create-update" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.131600 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="485cca71-1560-4c07-b6eb-2139ec7c4e97" containerName="mariadb-account-create-update" Nov 24 08:53:09 crc kubenswrapper[4718]: E1124 08:53:09.131617 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c853b5f-821e-4082-a3d4-3d914c6ef527" containerName="mariadb-database-create" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.131625 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c853b5f-821e-4082-a3d4-3d914c6ef527" containerName="mariadb-database-create" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.131776 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="485cca71-1560-4c07-b6eb-2139ec7c4e97" containerName="mariadb-account-create-update" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.131803 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c853b5f-821e-4082-a3d4-3d914c6ef527" containerName="mariadb-database-create" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.132399 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-tcnh5" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.136621 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-config-data" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.136750 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-scpbq" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.147025 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-tcnh5"] Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.232677 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9t2p9\" (UniqueName: \"kubernetes.io/projected/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-kube-api-access-9t2p9\") pod \"glance-db-sync-tcnh5\" (UID: \"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c\") " pod="glance-kuttl-tests/glance-db-sync-tcnh5" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.232786 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-config-data\") pod \"glance-db-sync-tcnh5\" (UID: \"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c\") " pod="glance-kuttl-tests/glance-db-sync-tcnh5" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.232833 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-db-sync-config-data\") pod \"glance-db-sync-tcnh5\" (UID: \"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c\") " pod="glance-kuttl-tests/glance-db-sync-tcnh5" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.333721 4718 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-config-data\") pod \"glance-db-sync-tcnh5\" (UID: \"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c\") " pod="glance-kuttl-tests/glance-db-sync-tcnh5" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.333788 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-db-sync-config-data\") pod \"glance-db-sync-tcnh5\" (UID: \"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c\") " pod="glance-kuttl-tests/glance-db-sync-tcnh5" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.333840 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9t2p9\" (UniqueName: \"kubernetes.io/projected/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-kube-api-access-9t2p9\") pod \"glance-db-sync-tcnh5\" (UID: \"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c\") " pod="glance-kuttl-tests/glance-db-sync-tcnh5" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.338620 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-db-sync-config-data\") pod \"glance-db-sync-tcnh5\" (UID: \"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c\") " pod="glance-kuttl-tests/glance-db-sync-tcnh5" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.351997 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9t2p9\" (UniqueName: \"kubernetes.io/projected/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-kube-api-access-9t2p9\") pod \"glance-db-sync-tcnh5\" (UID: \"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c\") " pod="glance-kuttl-tests/glance-db-sync-tcnh5" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.352790 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-config-data\") pod \"glance-db-sync-tcnh5\" (UID: \"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c\") " pod="glance-kuttl-tests/glance-db-sync-tcnh5" Nov 24 08:53:09 crc kubenswrapper[4718]: I1124 08:53:09.451624 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-tcnh5" Nov 24 08:53:12 crc kubenswrapper[4718]: I1124 08:53:12.547226 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-tcnh5"] Nov 24 08:53:12 crc kubenswrapper[4718]: W1124 08:53:12.554762 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5daf94d4_6c69_4b5d_a0e9_ded6ee79cf4c.slice/crio-004d57ba0b76ae2fa915e8efd96ef2fa023202aea5e2d96631efcedadb1b57b2 WatchSource:0}: Error finding container 004d57ba0b76ae2fa915e8efd96ef2fa023202aea5e2d96631efcedadb1b57b2: Status 404 returned error can't find the container with id 004d57ba0b76ae2fa915e8efd96ef2fa023202aea5e2d96631efcedadb1b57b2 Nov 24 08:53:12 crc kubenswrapper[4718]: I1124 08:53:12.949547 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstackclient" event={"ID":"a264ca86-f795-4ba2-945b-e6459a4d1e68","Type":"ContainerStarted","Data":"3ea2cdc0f06352ab9bae5e7a14730ee5c0061f89de2b26e6b2bf3eeeb9a53461"} Nov 24 08:53:12 crc kubenswrapper[4718]: I1124 08:53:12.955234 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-tcnh5" event={"ID":"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c","Type":"ContainerStarted","Data":"004d57ba0b76ae2fa915e8efd96ef2fa023202aea5e2d96631efcedadb1b57b2"} Nov 24 08:53:12 crc kubenswrapper[4718]: I1124 08:53:12.969896 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/openstackclient" podStartSLOduration=1.286486427 podStartE2EDuration="8.969863372s" podCreationTimestamp="2025-11-24 08:53:04 +0000 UTC" firstStartedPulling="2025-11-24 08:53:04.632843115 +0000 UTC m=+1056.749134019" lastFinishedPulling="2025-11-24 08:53:12.31622006 +0000 UTC m=+1064.432510964" observedRunningTime="2025-11-24 08:53:12.965485073 +0000 UTC m=+1065.081775997" watchObservedRunningTime="2025-11-24 08:53:12.969863372 +0000 UTC m=+1065.086154296" Nov 24 08:53:24 crc kubenswrapper[4718]: I1124 08:53:24.061731 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-tcnh5" event={"ID":"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c","Type":"ContainerStarted","Data":"9921b7b3f742da7b4785f5794c97d3f3d65aa89a2c464aa055237506fc49da01"} Nov 24 08:53:24 crc kubenswrapper[4718]: I1124 08:53:24.082740 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-db-sync-tcnh5" podStartSLOduration=4.181441926 podStartE2EDuration="15.082720865s" podCreationTimestamp="2025-11-24 08:53:09 +0000 UTC" firstStartedPulling="2025-11-24 08:53:12.558266347 +0000 UTC m=+1064.674557251" lastFinishedPulling="2025-11-24 08:53:23.459545286 +0000 UTC m=+1075.575836190" observedRunningTime="2025-11-24 08:53:24.078545892 +0000 UTC m=+1076.194836806" watchObservedRunningTime="2025-11-24 08:53:24.082720865 +0000 UTC m=+1076.199011769" Nov 24 08:53:32 crc kubenswrapper[4718]: I1124 08:53:32.126996 4718 generic.go:334] "Generic (PLEG): container finished" podID="5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c" containerID="9921b7b3f742da7b4785f5794c97d3f3d65aa89a2c464aa055237506fc49da01" exitCode=0 Nov 24 08:53:32 crc kubenswrapper[4718]: I1124 08:53:32.127530 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-tcnh5" event={"ID":"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c","Type":"ContainerDied","Data":"9921b7b3f742da7b4785f5794c97d3f3d65aa89a2c464aa055237506fc49da01"} Nov 24 08:53:33 
crc kubenswrapper[4718]: I1124 08:53:33.393869 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-tcnh5" Nov 24 08:53:33 crc kubenswrapper[4718]: I1124 08:53:33.494396 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-db-sync-config-data\") pod \"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c\" (UID: \"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c\") " Nov 24 08:53:33 crc kubenswrapper[4718]: I1124 08:53:33.494459 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9t2p9\" (UniqueName: \"kubernetes.io/projected/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-kube-api-access-9t2p9\") pod \"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c\" (UID: \"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c\") " Nov 24 08:53:33 crc kubenswrapper[4718]: I1124 08:53:33.494547 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-config-data\") pod \"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c\" (UID: \"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c\") " Nov 24 08:53:33 crc kubenswrapper[4718]: I1124 08:53:33.500506 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c" (UID: "5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:53:33 crc kubenswrapper[4718]: I1124 08:53:33.501122 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-kube-api-access-9t2p9" (OuterVolumeSpecName: "kube-api-access-9t2p9") pod "5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c" (UID: "5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c"). InnerVolumeSpecName "kube-api-access-9t2p9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:53:33 crc kubenswrapper[4718]: I1124 08:53:33.536678 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-config-data" (OuterVolumeSpecName: "config-data") pod "5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c" (UID: "5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:53:33 crc kubenswrapper[4718]: I1124 08:53:33.596112 4718 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:33 crc kubenswrapper[4718]: I1124 08:53:33.596210 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9t2p9\" (UniqueName: \"kubernetes.io/projected/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-kube-api-access-9t2p9\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:33 crc kubenswrapper[4718]: I1124 08:53:33.596294 4718 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:34 crc kubenswrapper[4718]: I1124 08:53:34.145120 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-tcnh5" event={"ID":"5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c","Type":"ContainerDied","Data":"004d57ba0b76ae2fa915e8efd96ef2fa023202aea5e2d96631efcedadb1b57b2"} Nov 24 08:53:34 crc kubenswrapper[4718]: I1124 08:53:34.145167 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="004d57ba0b76ae2fa915e8efd96ef2fa023202aea5e2d96631efcedadb1b57b2" Nov 24 08:53:34 crc kubenswrapper[4718]: I1124 08:53:34.145213 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-tcnh5" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.459279 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:53:35 crc kubenswrapper[4718]: E1124 08:53:35.459905 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c" containerName="glance-db-sync" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.459922 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c" containerName="glance-db-sync" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.460100 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c" containerName="glance-db-sync" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.461064 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.463244 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-scripts" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.463568 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-single-config-data" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.472834 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-scpbq" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.478704 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.499488 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.500905 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.521821 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.627398 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-dev\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.627447 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.627477 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52dd0eda-7441-4761-9f02-2e800c3c752c-config-data\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.627502 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-etc-nvme\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.627653 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25x6c\" (UniqueName: \"kubernetes.io/projected/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-kube-api-access-25x6c\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.627715 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-config-data\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.627737 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.627799 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-etc-iscsi\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.627862 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-scripts\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.627886 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-var-locks-brick\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.627905 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.627921 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xnkv\" (UniqueName: \"kubernetes.io/projected/52dd0eda-7441-4761-9f02-2e800c3c752c-kube-api-access-2xnkv\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.627944 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-sys\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.627993 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-httpd-run\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.628072 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-var-locks-brick\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.628100 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-lib-modules\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.628141 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-sys\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.628174 4718 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-dev\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.628198 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-logs\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.628230 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-lib-modules\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.628255 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-run\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.628285 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52dd0eda-7441-4761-9f02-2e800c3c752c-scripts\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.628314 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.628357 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.628382 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-run\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.628448 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52dd0eda-7441-4761-9f02-2e800c3c752c-logs\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.628513 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/52dd0eda-7441-4761-9f02-2e800c3c752c-httpd-run\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.628546 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-etc-nvme\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730344 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-etc-nvme\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730396 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25x6c\" (UniqueName: \"kubernetes.io/projected/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-kube-api-access-25x6c\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730430 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-config-data\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730451 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730473 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-etc-iscsi\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730494 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-scripts\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730512 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730528 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-var-locks-brick\") pod 
\"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730544 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xnkv\" (UniqueName: \"kubernetes.io/projected/52dd0eda-7441-4761-9f02-2e800c3c752c-kube-api-access-2xnkv\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730561 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-sys\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730575 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-httpd-run\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730596 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-var-locks-brick\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730613 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-lib-modules\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730633 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-sys\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730652 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-dev\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730666 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-logs\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730687 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-lib-modules\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc 
kubenswrapper[4718]: I1124 08:53:35.730710 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-run\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730734 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52dd0eda-7441-4761-9f02-2e800c3c752c-scripts\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730758 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730778 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730796 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-run\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730824 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52dd0eda-7441-4761-9f02-2e800c3c752c-logs\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730874 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/52dd0eda-7441-4761-9f02-2e800c3c752c-httpd-run\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730901 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-etc-nvme\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730919 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-dev\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730936 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod 
\"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.730954 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52dd0eda-7441-4761-9f02-2e800c3c752c-config-data\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.731694 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-sys\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.731763 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-sys\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.731816 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.731877 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-run\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.732030 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-dev\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.732066 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-var-locks-brick\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.732066 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-var-locks-brick\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.732118 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-dev\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.732262 4718 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-httpd-run\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.732276 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") device mount path \"/mnt/openstack/pv08\"" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.732264 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") device mount path \"/mnt/openstack/pv04\"" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.732277 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/52dd0eda-7441-4761-9f02-2e800c3c752c-httpd-run\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.732340 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-lib-modules\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.732497 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-logs\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.732677 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-lib-modules\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.732708 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") device mount path \"/mnt/openstack/pv07\"" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.732711 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-etc-iscsi\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.732712 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod 
\"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") device mount path \"/mnt/openstack/pv09\"" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.732751 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-run\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.732771 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-etc-nvme\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.732892 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-etc-nvme\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.733165 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52dd0eda-7441-4761-9f02-2e800c3c752c-logs\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.736361 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-scripts\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.738304 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52dd0eda-7441-4761-9f02-2e800c3c752c-scripts\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.739469 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-config-data\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.740550 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52dd0eda-7441-4761-9f02-2e800c3c752c-config-data\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.757156 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.757939 4718 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.762398 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25x6c\" (UniqueName: \"kubernetes.io/projected/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-kube-api-access-25x6c\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.762476 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xnkv\" (UniqueName: \"kubernetes.io/projected/52dd0eda-7441-4761-9f02-2e800c3c752c-kube-api-access-2xnkv\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.766112 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-single-0\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.775919 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.785362 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-single-1\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:35 crc kubenswrapper[4718]: I1124 08:53:35.817617 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:36 crc kubenswrapper[4718]: I1124 08:53:36.234735 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:53:36 crc kubenswrapper[4718]: I1124 08:53:36.281155 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Nov 24 08:53:36 crc kubenswrapper[4718]: W1124 08:53:36.297579 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod52dd0eda_7441_4761_9f02_2e800c3c752c.slice/crio-c2934f3237547d0423938a898ebe6d7bd19c00160f573e1ed31dcc50334a081c WatchSource:0}: Error finding container c2934f3237547d0423938a898ebe6d7bd19c00160f573e1ed31dcc50334a081c: Status 404 returned error can't find the container with id c2934f3237547d0423938a898ebe6d7bd19c00160f573e1ed31dcc50334a081c Nov 24 08:53:36 crc kubenswrapper[4718]: I1124 08:53:36.470939 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Nov 24 08:53:37 crc kubenswrapper[4718]: I1124 08:53:37.172548 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"1eeadebf-ca42-46de-a997-2c8cb8a82bb2","Type":"ContainerStarted","Data":"59186eeb38ef6752658b3280f4c685217ece6aecc19601a22a53b7ba25def702"} Nov 24 08:53:37 crc kubenswrapper[4718]: I1124 08:53:37.172881 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"1eeadebf-ca42-46de-a997-2c8cb8a82bb2","Type":"ContainerStarted","Data":"c82184e92fbcb973fbf847f5e97843b0e42c8324cc10562a1f9bc547c570cfd7"} Nov 24 08:53:37 crc kubenswrapper[4718]: I1124 08:53:37.174603 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"52dd0eda-7441-4761-9f02-2e800c3c752c","Type":"ContainerStarted","Data":"bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a"} Nov 24 08:53:37 crc kubenswrapper[4718]: I1124 08:53:37.174736 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"52dd0eda-7441-4761-9f02-2e800c3c752c","Type":"ContainerStarted","Data":"c2934f3237547d0423938a898ebe6d7bd19c00160f573e1ed31dcc50334a081c"} Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.183153 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"1eeadebf-ca42-46de-a997-2c8cb8a82bb2","Type":"ContainerStarted","Data":"09d10e9e37d3e5786b71fe65f5c9128341f41bb4a04d831baad280fff70dee65"} Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.188630 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"52dd0eda-7441-4761-9f02-2e800c3c752c","Type":"ContainerStarted","Data":"87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217"} Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.188774 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-1" podUID="52dd0eda-7441-4761-9f02-2e800c3c752c" containerName="glance-log" containerID="cri-o://bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a" gracePeriod=30 Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.189063 4718 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="glance-kuttl-tests/glance-default-single-1" podUID="52dd0eda-7441-4761-9f02-2e800c3c752c" containerName="glance-httpd" containerID="cri-o://87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217" gracePeriod=30 Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.220432 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-single-0" podStartSLOduration=4.220414924 podStartE2EDuration="4.220414924s" podCreationTimestamp="2025-11-24 08:53:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:53:38.208687475 +0000 UTC m=+1090.324978379" watchObservedRunningTime="2025-11-24 08:53:38.220414924 +0000 UTC m=+1090.336705828" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.239526 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-single-1" podStartSLOduration=4.239506376 podStartE2EDuration="4.239506376s" podCreationTimestamp="2025-11-24 08:53:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:53:38.23764643 +0000 UTC m=+1090.353937344" watchObservedRunningTime="2025-11-24 08:53:38.239506376 +0000 UTC m=+1090.355797280" Nov 24 08:53:38 crc kubenswrapper[4718]: W1124 08:53:38.490086 4718 watcher.go:93] Error while processing event ("/sys/fs/cgroup/user.slice/user-0.slice/session-c4.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/user.slice/user-0.slice/session-c4.scope: no such file or directory Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.618001 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.674958 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52dd0eda-7441-4761-9f02-2e800c3c752c-scripts\") pod \"52dd0eda-7441-4761-9f02-2e800c3c752c\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.675392 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-var-locks-brick\") pod \"52dd0eda-7441-4761-9f02-2e800c3c752c\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.675457 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-run\") pod \"52dd0eda-7441-4761-9f02-2e800c3c752c\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.675470 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "52dd0eda-7441-4761-9f02-2e800c3c752c" (UID: "52dd0eda-7441-4761-9f02-2e800c3c752c"). InnerVolumeSpecName "var-locks-brick". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.675483 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xnkv\" (UniqueName: \"kubernetes.io/projected/52dd0eda-7441-4761-9f02-2e800c3c752c-kube-api-access-2xnkv\") pod \"52dd0eda-7441-4761-9f02-2e800c3c752c\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.675538 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52dd0eda-7441-4761-9f02-2e800c3c752c-logs\") pod \"52dd0eda-7441-4761-9f02-2e800c3c752c\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.675560 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52dd0eda-7441-4761-9f02-2e800c3c752c-config-data\") pod \"52dd0eda-7441-4761-9f02-2e800c3c752c\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.675583 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-lib-modules\") pod \"52dd0eda-7441-4761-9f02-2e800c3c752c\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.675635 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-dev\") pod \"52dd0eda-7441-4761-9f02-2e800c3c752c\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.675649 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"52dd0eda-7441-4761-9f02-2e800c3c752c\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.675712 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/52dd0eda-7441-4761-9f02-2e800c3c752c-httpd-run\") pod \"52dd0eda-7441-4761-9f02-2e800c3c752c\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.675741 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"52dd0eda-7441-4761-9f02-2e800c3c752c\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.675744 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "52dd0eda-7441-4761-9f02-2e800c3c752c" (UID: "52dd0eda-7441-4761-9f02-2e800c3c752c"). InnerVolumeSpecName "lib-modules". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.675772 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-sys\") pod \"52dd0eda-7441-4761-9f02-2e800c3c752c\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.675798 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-sys" (OuterVolumeSpecName: "sys") pod "52dd0eda-7441-4761-9f02-2e800c3c752c" (UID: "52dd0eda-7441-4761-9f02-2e800c3c752c"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.675861 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-etc-nvme\") pod \"52dd0eda-7441-4761-9f02-2e800c3c752c\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.675915 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-etc-iscsi\") pod \"52dd0eda-7441-4761-9f02-2e800c3c752c\" (UID: \"52dd0eda-7441-4761-9f02-2e800c3c752c\") " Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.676024 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52dd0eda-7441-4761-9f02-2e800c3c752c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "52dd0eda-7441-4761-9f02-2e800c3c752c" (UID: "52dd0eda-7441-4761-9f02-2e800c3c752c"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.676030 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52dd0eda-7441-4761-9f02-2e800c3c752c-logs" (OuterVolumeSpecName: "logs") pod "52dd0eda-7441-4761-9f02-2e800c3c752c" (UID: "52dd0eda-7441-4761-9f02-2e800c3c752c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.676111 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-dev" (OuterVolumeSpecName: "dev") pod "52dd0eda-7441-4761-9f02-2e800c3c752c" (UID: "52dd0eda-7441-4761-9f02-2e800c3c752c"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.676143 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "52dd0eda-7441-4761-9f02-2e800c3c752c" (UID: "52dd0eda-7441-4761-9f02-2e800c3c752c"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.676169 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "52dd0eda-7441-4761-9f02-2e800c3c752c" (UID: "52dd0eda-7441-4761-9f02-2e800c3c752c"). InnerVolumeSpecName "etc-iscsi". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.676192 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-run" (OuterVolumeSpecName: "run") pod "52dd0eda-7441-4761-9f02-2e800c3c752c" (UID: "52dd0eda-7441-4761-9f02-2e800c3c752c"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.676529 4718 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-var-locks-brick\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.676543 4718 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-run\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.676555 4718 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52dd0eda-7441-4761-9f02-2e800c3c752c-logs\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.676563 4718 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-lib-modules\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.676571 4718 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-dev\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.676579 4718 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/52dd0eda-7441-4761-9f02-2e800c3c752c-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.676605 4718 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-sys\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.676614 4718 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-etc-nvme\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.676624 4718 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/52dd0eda-7441-4761-9f02-2e800c3c752c-etc-iscsi\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.680703 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52dd0eda-7441-4761-9f02-2e800c3c752c-kube-api-access-2xnkv" (OuterVolumeSpecName: "kube-api-access-2xnkv") pod "52dd0eda-7441-4761-9f02-2e800c3c752c" (UID: "52dd0eda-7441-4761-9f02-2e800c3c752c"). InnerVolumeSpecName "kube-api-access-2xnkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.684122 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52dd0eda-7441-4761-9f02-2e800c3c752c-scripts" (OuterVolumeSpecName: "scripts") pod "52dd0eda-7441-4761-9f02-2e800c3c752c" (UID: "52dd0eda-7441-4761-9f02-2e800c3c752c"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.684300 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance-cache") pod "52dd0eda-7441-4761-9f02-2e800c3c752c" (UID: "52dd0eda-7441-4761-9f02-2e800c3c752c"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.686103 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "52dd0eda-7441-4761-9f02-2e800c3c752c" (UID: "52dd0eda-7441-4761-9f02-2e800c3c752c"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.721682 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52dd0eda-7441-4761-9f02-2e800c3c752c-config-data" (OuterVolumeSpecName: "config-data") pod "52dd0eda-7441-4761-9f02-2e800c3c752c" (UID: "52dd0eda-7441-4761-9f02-2e800c3c752c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.778479 4718 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52dd0eda-7441-4761-9f02-2e800c3c752c-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.778568 4718 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.778584 4718 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.778596 4718 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52dd0eda-7441-4761-9f02-2e800c3c752c-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.778612 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xnkv\" (UniqueName: \"kubernetes.io/projected/52dd0eda-7441-4761-9f02-2e800c3c752c-kube-api-access-2xnkv\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.793718 4718 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.796536 4718 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.880355 4718 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:38 crc kubenswrapper[4718]: I1124 08:53:38.880396 4718 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.198065 4718 generic.go:334] "Generic (PLEG): container finished" podID="52dd0eda-7441-4761-9f02-2e800c3c752c" containerID="87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217" exitCode=143 Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.198130 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.198155 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"52dd0eda-7441-4761-9f02-2e800c3c752c","Type":"ContainerDied","Data":"87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217"} Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.198213 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"52dd0eda-7441-4761-9f02-2e800c3c752c","Type":"ContainerDied","Data":"bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a"} Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.198232 4718 scope.go:117] "RemoveContainer" containerID="87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.198137 4718 generic.go:334] "Generic (PLEG): container finished" podID="52dd0eda-7441-4761-9f02-2e800c3c752c" containerID="bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a" exitCode=143 Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.198485 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"52dd0eda-7441-4761-9f02-2e800c3c752c","Type":"ContainerDied","Data":"c2934f3237547d0423938a898ebe6d7bd19c00160f573e1ed31dcc50334a081c"} Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.234258 4718 scope.go:117] "RemoveContainer" containerID="bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.239992 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.249661 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.256366 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Nov 24 08:53:39 crc kubenswrapper[4718]: E1124 08:53:39.256784 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52dd0eda-7441-4761-9f02-2e800c3c752c" containerName="glance-httpd" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.256802 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="52dd0eda-7441-4761-9f02-2e800c3c752c" containerName="glance-httpd" Nov 24 08:53:39 crc kubenswrapper[4718]: E1124 08:53:39.256836 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52dd0eda-7441-4761-9f02-2e800c3c752c" containerName="glance-log" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.256846 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="52dd0eda-7441-4761-9f02-2e800c3c752c" containerName="glance-log" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.257068 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="52dd0eda-7441-4761-9f02-2e800c3c752c" 
containerName="glance-log" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.257136 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="52dd0eda-7441-4761-9f02-2e800c3c752c" containerName="glance-httpd" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.260228 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.264581 4718 scope.go:117] "RemoveContainer" containerID="87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217" Nov 24 08:53:39 crc kubenswrapper[4718]: E1124 08:53:39.265823 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217\": container with ID starting with 87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217 not found: ID does not exist" containerID="87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.265873 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217"} err="failed to get container status \"87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217\": rpc error: code = NotFound desc = could not find container \"87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217\": container with ID starting with 87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217 not found: ID does not exist" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.265919 4718 scope.go:117] "RemoveContainer" containerID="bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a" Nov 24 08:53:39 crc kubenswrapper[4718]: E1124 08:53:39.266442 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a\": container with ID starting with bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a not found: ID does not exist" containerID="bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.266465 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a"} err="failed to get container status \"bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a\": rpc error: code = NotFound desc = could not find container \"bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a\": container with ID starting with bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a not found: ID does not exist" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.266482 4718 scope.go:117] "RemoveContainer" containerID="87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.268304 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217"} err="failed to get container status \"87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217\": rpc error: code = NotFound desc = could not find container \"87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217\": container with ID 
starting with 87109bd6bd558642ec4671a864b749eeb90be6c625d718f7dcfd938954015217 not found: ID does not exist" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.268330 4718 scope.go:117] "RemoveContainer" containerID="bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.273633 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a"} err="failed to get container status \"bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a\": rpc error: code = NotFound desc = could not find container \"bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a\": container with ID starting with bcdd0982f2a8f910ab4212d057a2fb959d9f13e6b39e75b48a9501adff044f0a not found: ID does not exist" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.276094 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.386435 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-logs\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.386497 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-lib-modules\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.386668 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-etc-nvme\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.386732 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-dev\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.386757 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-httpd-run\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.386823 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-config-data\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.386947 4718 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.386998 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-var-locks-brick\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.387056 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.387086 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pg4p9\" (UniqueName: \"kubernetes.io/projected/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-kube-api-access-pg4p9\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.387123 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-sys\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.387145 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-run\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.387168 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-scripts\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.387348 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.488552 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-httpd-run\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.488616 4718 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-config-data\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.488672 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.488697 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-var-locks-brick\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.488737 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.488761 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pg4p9\" (UniqueName: \"kubernetes.io/projected/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-kube-api-access-pg4p9\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.488805 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.488893 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-var-locks-brick\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.488957 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-sys\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.488909 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-sys\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.489079 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-run\") pod 
\"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.488957 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") device mount path \"/mnt/openstack/pv07\"" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.489107 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-scripts\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.489783 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.489140 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-run\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.489204 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-httpd-run\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.489856 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-logs\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.489904 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-lib-modules\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.489917 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") device mount path \"/mnt/openstack/pv04\"" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.489942 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-etc-nvme\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc 
kubenswrapper[4718]: I1124 08:53:39.489952 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-lib-modules\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.490029 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-dev\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.490163 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-dev\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.490198 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-logs\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.490231 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-etc-nvme\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.495612 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-config-data\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.496921 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-scripts\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.506408 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pg4p9\" (UniqueName: \"kubernetes.io/projected/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-kube-api-access-pg4p9\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.511734 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-single-1\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.511763 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-single-1\" (UID: 
\"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.576879 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:39 crc kubenswrapper[4718]: I1124 08:53:39.984686 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Nov 24 08:53:40 crc kubenswrapper[4718]: I1124 08:53:40.206952 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"94bc7b3f-a682-4f02-a7e3-efb363bf98f3","Type":"ContainerStarted","Data":"89cc5e8c573fe4f0234a3fd8566c04476639efb434d02259adfde1a69af995c8"} Nov 24 08:53:40 crc kubenswrapper[4718]: I1124 08:53:40.207017 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"94bc7b3f-a682-4f02-a7e3-efb363bf98f3","Type":"ContainerStarted","Data":"a8d2d25ec07af4dc2b89e88e70accd191d93df97253a3298c233cfb58c3f51b4"} Nov 24 08:53:40 crc kubenswrapper[4718]: I1124 08:53:40.607495 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52dd0eda-7441-4761-9f02-2e800c3c752c" path="/var/lib/kubelet/pods/52dd0eda-7441-4761-9f02-2e800c3c752c/volumes" Nov 24 08:53:41 crc kubenswrapper[4718]: I1124 08:53:41.218373 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"94bc7b3f-a682-4f02-a7e3-efb363bf98f3","Type":"ContainerStarted","Data":"ac77ece9bf98ea30734caded780705db9bd3fc98b7610deb3a7f3907d5fa5fe3"} Nov 24 08:53:41 crc kubenswrapper[4718]: I1124 08:53:41.244107 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-single-1" podStartSLOduration=2.244088541 podStartE2EDuration="2.244088541s" podCreationTimestamp="2025-11-24 08:53:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:53:41.23916686 +0000 UTC m=+1093.355457764" watchObservedRunningTime="2025-11-24 08:53:41.244088541 +0000 UTC m=+1093.360379445" Nov 24 08:53:45 crc kubenswrapper[4718]: I1124 08:53:45.776202 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:45 crc kubenswrapper[4718]: I1124 08:53:45.776586 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:45 crc kubenswrapper[4718]: I1124 08:53:45.799219 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:45 crc kubenswrapper[4718]: I1124 08:53:45.819667 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:46 crc kubenswrapper[4718]: I1124 08:53:46.258223 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:46 crc kubenswrapper[4718]: I1124 08:53:46.258257 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:48 crc kubenswrapper[4718]: I1124 08:53:48.271411 4718 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 08:53:48 crc kubenswrapper[4718]: I1124 08:53:48.271734 4718 
prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 08:53:48 crc kubenswrapper[4718]: I1124 08:53:48.313208 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:48 crc kubenswrapper[4718]: I1124 08:53:48.349002 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:49 crc kubenswrapper[4718]: I1124 08:53:49.578163 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:49 crc kubenswrapper[4718]: I1124 08:53:49.578516 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:49 crc kubenswrapper[4718]: I1124 08:53:49.607705 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:49 crc kubenswrapper[4718]: I1124 08:53:49.617273 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:50 crc kubenswrapper[4718]: I1124 08:53:50.289698 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:50 crc kubenswrapper[4718]: I1124 08:53:50.289759 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:52 crc kubenswrapper[4718]: I1124 08:53:52.045344 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:53:52 crc kubenswrapper[4718]: I1124 08:53:52.045880 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:53:52 crc kubenswrapper[4718]: I1124 08:53:52.310329 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:52 crc kubenswrapper[4718]: I1124 08:53:52.310482 4718 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 08:53:52 crc kubenswrapper[4718]: I1124 08:53:52.321504 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:53:52 crc kubenswrapper[4718]: I1124 08:53:52.374261 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:53:52 crc kubenswrapper[4718]: I1124 08:53:52.374514 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="1eeadebf-ca42-46de-a997-2c8cb8a82bb2" containerName="glance-log" containerID="cri-o://59186eeb38ef6752658b3280f4c685217ece6aecc19601a22a53b7ba25def702" gracePeriod=30 Nov 24 08:53:52 crc kubenswrapper[4718]: I1124 08:53:52.374623 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" 
podUID="1eeadebf-ca42-46de-a997-2c8cb8a82bb2" containerName="glance-httpd" containerID="cri-o://09d10e9e37d3e5786b71fe65f5c9128341f41bb4a04d831baad280fff70dee65" gracePeriod=30 Nov 24 08:53:53 crc kubenswrapper[4718]: I1124 08:53:53.312467 4718 generic.go:334] "Generic (PLEG): container finished" podID="1eeadebf-ca42-46de-a997-2c8cb8a82bb2" containerID="59186eeb38ef6752658b3280f4c685217ece6aecc19601a22a53b7ba25def702" exitCode=143 Nov 24 08:53:53 crc kubenswrapper[4718]: I1124 08:53:53.312568 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"1eeadebf-ca42-46de-a997-2c8cb8a82bb2","Type":"ContainerDied","Data":"59186eeb38ef6752658b3280f4c685217ece6aecc19601a22a53b7ba25def702"} Nov 24 08:53:55 crc kubenswrapper[4718]: I1124 08:53:55.912680 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.043782 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-config-data\") pod \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.043828 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25x6c\" (UniqueName: \"kubernetes.io/projected/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-kube-api-access-25x6c\") pod \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.043871 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-etc-iscsi\") pod \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.043902 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-logs\") pod \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.043926 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-sys\") pod \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.043938 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-lib-modules\") pod \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.043962 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-var-locks-brick\") pod \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.043990 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: 
\"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-etc-nvme\") pod \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.044004 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.044022 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-run\") pod \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.044074 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-dev\") pod \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.044109 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-httpd-run\") pod \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.044124 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.044152 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-scripts\") pod \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\" (UID: \"1eeadebf-ca42-46de-a997-2c8cb8a82bb2\") " Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.045161 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "1eeadebf-ca42-46de-a997-2c8cb8a82bb2" (UID: "1eeadebf-ca42-46de-a997-2c8cb8a82bb2"). InnerVolumeSpecName "var-locks-brick". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.045345 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "1eeadebf-ca42-46de-a997-2c8cb8a82bb2" (UID: "1eeadebf-ca42-46de-a997-2c8cb8a82bb2"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.045366 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-sys" (OuterVolumeSpecName: "sys") pod "1eeadebf-ca42-46de-a997-2c8cb8a82bb2" (UID: "1eeadebf-ca42-46de-a997-2c8cb8a82bb2"). InnerVolumeSpecName "sys". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.045416 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "1eeadebf-ca42-46de-a997-2c8cb8a82bb2" (UID: "1eeadebf-ca42-46de-a997-2c8cb8a82bb2"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.045456 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "1eeadebf-ca42-46de-a997-2c8cb8a82bb2" (UID: "1eeadebf-ca42-46de-a997-2c8cb8a82bb2"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.045510 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-dev" (OuterVolumeSpecName: "dev") pod "1eeadebf-ca42-46de-a997-2c8cb8a82bb2" (UID: "1eeadebf-ca42-46de-a997-2c8cb8a82bb2"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.045546 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-run" (OuterVolumeSpecName: "run") pod "1eeadebf-ca42-46de-a997-2c8cb8a82bb2" (UID: "1eeadebf-ca42-46de-a997-2c8cb8a82bb2"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.045666 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "1eeadebf-ca42-46de-a997-2c8cb8a82bb2" (UID: "1eeadebf-ca42-46de-a997-2c8cb8a82bb2"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.046191 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-logs" (OuterVolumeSpecName: "logs") pod "1eeadebf-ca42-46de-a997-2c8cb8a82bb2" (UID: "1eeadebf-ca42-46de-a997-2c8cb8a82bb2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.050566 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance-cache") pod "1eeadebf-ca42-46de-a997-2c8cb8a82bb2" (UID: "1eeadebf-ca42-46de-a997-2c8cb8a82bb2"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.050626 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-kube-api-access-25x6c" (OuterVolumeSpecName: "kube-api-access-25x6c") pod "1eeadebf-ca42-46de-a997-2c8cb8a82bb2" (UID: "1eeadebf-ca42-46de-a997-2c8cb8a82bb2"). InnerVolumeSpecName "kube-api-access-25x6c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.050833 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-scripts" (OuterVolumeSpecName: "scripts") pod "1eeadebf-ca42-46de-a997-2c8cb8a82bb2" (UID: "1eeadebf-ca42-46de-a997-2c8cb8a82bb2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.051193 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "1eeadebf-ca42-46de-a997-2c8cb8a82bb2" (UID: "1eeadebf-ca42-46de-a997-2c8cb8a82bb2"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.085768 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-config-data" (OuterVolumeSpecName: "config-data") pod "1eeadebf-ca42-46de-a997-2c8cb8a82bb2" (UID: "1eeadebf-ca42-46de-a997-2c8cb8a82bb2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.145603 4718 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.145638 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25x6c\" (UniqueName: \"kubernetes.io/projected/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-kube-api-access-25x6c\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.145648 4718 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-etc-iscsi\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.145657 4718 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-logs\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.145670 4718 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-sys\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.145687 4718 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-lib-modules\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.145698 4718 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-var-locks-brick\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.145721 4718 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-etc-nvme\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.145760 4718 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume 
\"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.145772 4718 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-run\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.145784 4718 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-dev\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.145796 4718 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.145820 4718 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.145831 4718 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1eeadebf-ca42-46de-a997-2c8cb8a82bb2-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.159013 4718 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.160141 4718 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.247134 4718 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.247169 4718 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.334817 4718 generic.go:334] "Generic (PLEG): container finished" podID="1eeadebf-ca42-46de-a997-2c8cb8a82bb2" containerID="09d10e9e37d3e5786b71fe65f5c9128341f41bb4a04d831baad280fff70dee65" exitCode=0 Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.334859 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"1eeadebf-ca42-46de-a997-2c8cb8a82bb2","Type":"ContainerDied","Data":"09d10e9e37d3e5786b71fe65f5c9128341f41bb4a04d831baad280fff70dee65"} Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.334901 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"1eeadebf-ca42-46de-a997-2c8cb8a82bb2","Type":"ContainerDied","Data":"c82184e92fbcb973fbf847f5e97843b0e42c8324cc10562a1f9bc547c570cfd7"} Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.334927 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.334924 4718 scope.go:117] "RemoveContainer" containerID="09d10e9e37d3e5786b71fe65f5c9128341f41bb4a04d831baad280fff70dee65" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.356835 4718 scope.go:117] "RemoveContainer" containerID="59186eeb38ef6752658b3280f4c685217ece6aecc19601a22a53b7ba25def702" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.367446 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.375562 4718 scope.go:117] "RemoveContainer" containerID="09d10e9e37d3e5786b71fe65f5c9128341f41bb4a04d831baad280fff70dee65" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.380683 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:53:56 crc kubenswrapper[4718]: E1124 08:53:56.381150 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09d10e9e37d3e5786b71fe65f5c9128341f41bb4a04d831baad280fff70dee65\": container with ID starting with 09d10e9e37d3e5786b71fe65f5c9128341f41bb4a04d831baad280fff70dee65 not found: ID does not exist" containerID="09d10e9e37d3e5786b71fe65f5c9128341f41bb4a04d831baad280fff70dee65" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.381201 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09d10e9e37d3e5786b71fe65f5c9128341f41bb4a04d831baad280fff70dee65"} err="failed to get container status \"09d10e9e37d3e5786b71fe65f5c9128341f41bb4a04d831baad280fff70dee65\": rpc error: code = NotFound desc = could not find container \"09d10e9e37d3e5786b71fe65f5c9128341f41bb4a04d831baad280fff70dee65\": container with ID starting with 09d10e9e37d3e5786b71fe65f5c9128341f41bb4a04d831baad280fff70dee65 not found: ID does not exist" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.381232 4718 scope.go:117] "RemoveContainer" containerID="59186eeb38ef6752658b3280f4c685217ece6aecc19601a22a53b7ba25def702" Nov 24 08:53:56 crc kubenswrapper[4718]: E1124 08:53:56.381720 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59186eeb38ef6752658b3280f4c685217ece6aecc19601a22a53b7ba25def702\": container with ID starting with 59186eeb38ef6752658b3280f4c685217ece6aecc19601a22a53b7ba25def702 not found: ID does not exist" containerID="59186eeb38ef6752658b3280f4c685217ece6aecc19601a22a53b7ba25def702" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.381770 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59186eeb38ef6752658b3280f4c685217ece6aecc19601a22a53b7ba25def702"} err="failed to get container status \"59186eeb38ef6752658b3280f4c685217ece6aecc19601a22a53b7ba25def702\": rpc error: code = NotFound desc = could not find container \"59186eeb38ef6752658b3280f4c685217ece6aecc19601a22a53b7ba25def702\": container with ID starting with 59186eeb38ef6752658b3280f4c685217ece6aecc19601a22a53b7ba25def702 not found: ID does not exist" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.397054 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:53:56 crc kubenswrapper[4718]: E1124 08:53:56.397358 4718 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="1eeadebf-ca42-46de-a997-2c8cb8a82bb2" containerName="glance-log" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.397374 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eeadebf-ca42-46de-a997-2c8cb8a82bb2" containerName="glance-log" Nov 24 08:53:56 crc kubenswrapper[4718]: E1124 08:53:56.397391 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eeadebf-ca42-46de-a997-2c8cb8a82bb2" containerName="glance-httpd" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.397398 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eeadebf-ca42-46de-a997-2c8cb8a82bb2" containerName="glance-httpd" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.397519 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="1eeadebf-ca42-46de-a997-2c8cb8a82bb2" containerName="glance-log" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.397535 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="1eeadebf-ca42-46de-a997-2c8cb8a82bb2" containerName="glance-httpd" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.398360 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.414854 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.449817 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-var-locks-brick\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.449871 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/22f63ca4-747e-4636-be43-790841b0b0ab-httpd-run\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.449893 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2nzd\" (UniqueName: \"kubernetes.io/projected/22f63ca4-747e-4636-be43-790841b0b0ab-kube-api-access-l2nzd\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.449912 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.449933 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-dev\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.449962 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-etc-iscsi\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.449994 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-run\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.450027 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22f63ca4-747e-4636-be43-790841b0b0ab-scripts\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.450221 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-sys\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.450252 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.450286 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-lib-modules\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.450312 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-etc-nvme\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.450350 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22f63ca4-747e-4636-be43-790841b0b0ab-config-data\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.450382 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22f63ca4-747e-4636-be43-790841b0b0ab-logs\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.552066 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.552106 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-lib-modules\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.552130 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-etc-nvme\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.552156 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22f63ca4-747e-4636-be43-790841b0b0ab-config-data\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.552299 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-lib-modules\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.552355 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") device mount path \"/mnt/openstack/pv09\"" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.552370 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22f63ca4-747e-4636-be43-790841b0b0ab-logs\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.552419 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-var-locks-brick\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.552456 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/22f63ca4-747e-4636-be43-790841b0b0ab-httpd-run\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.552439 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-etc-nvme\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " 
pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.552482 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2nzd\" (UniqueName: \"kubernetes.io/projected/22f63ca4-747e-4636-be43-790841b0b0ab-kube-api-access-l2nzd\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.552556 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.552617 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-dev\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.552699 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-etc-iscsi\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.552733 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-run\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.552779 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22f63ca4-747e-4636-be43-790841b0b0ab-scripts\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.552873 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-sys\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.553008 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-sys\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.553146 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") device mount path \"/mnt/openstack/pv08\"" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.553168 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22f63ca4-747e-4636-be43-790841b0b0ab-logs\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.553227 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-var-locks-brick\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.553659 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/22f63ca4-747e-4636-be43-790841b0b0ab-httpd-run\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.553712 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-etc-iscsi\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.553743 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-dev\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.553851 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-run\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.557088 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22f63ca4-747e-4636-be43-790841b0b0ab-scripts\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.558338 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22f63ca4-747e-4636-be43-790841b0b0ab-config-data\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.589875 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2nzd\" (UniqueName: \"kubernetes.io/projected/22f63ca4-747e-4636-be43-790841b0b0ab-kube-api-access-l2nzd\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.594388 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" 
Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.594638 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-single-0\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.607346 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1eeadebf-ca42-46de-a997-2c8cb8a82bb2" path="/var/lib/kubelet/pods/1eeadebf-ca42-46de-a997-2c8cb8a82bb2/volumes" Nov 24 08:53:56 crc kubenswrapper[4718]: I1124 08:53:56.712376 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:53:57 crc kubenswrapper[4718]: I1124 08:53:57.151623 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:53:57 crc kubenswrapper[4718]: I1124 08:53:57.342212 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"22f63ca4-747e-4636-be43-790841b0b0ab","Type":"ContainerStarted","Data":"002f676778ae015fed2d16a7b970b1f1552cef535161a5d430f07b310071a102"} Nov 24 08:53:57 crc kubenswrapper[4718]: I1124 08:53:57.342693 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"22f63ca4-747e-4636-be43-790841b0b0ab","Type":"ContainerStarted","Data":"c11f47fdda6eceb095b7daede75a0795c6de7ad04e24a01383615849f5451699"} Nov 24 08:53:58 crc kubenswrapper[4718]: I1124 08:53:58.350553 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"22f63ca4-747e-4636-be43-790841b0b0ab","Type":"ContainerStarted","Data":"5279fb7bd59e41f189c77800fe0ed2a775436d51d8d1c097eda3075adce84469"} Nov 24 08:53:58 crc kubenswrapper[4718]: I1124 08:53:58.373576 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-single-0" podStartSLOduration=2.37355885 podStartE2EDuration="2.37355885s" podCreationTimestamp="2025-11-24 08:53:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:53:58.370060164 +0000 UTC m=+1110.486351068" watchObservedRunningTime="2025-11-24 08:53:58.37355885 +0000 UTC m=+1110.489849754" Nov 24 08:54:06 crc kubenswrapper[4718]: I1124 08:54:06.712767 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:06 crc kubenswrapper[4718]: I1124 08:54:06.714933 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:06 crc kubenswrapper[4718]: I1124 08:54:06.735863 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:06 crc kubenswrapper[4718]: I1124 08:54:06.754065 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:07 crc kubenswrapper[4718]: I1124 08:54:07.411206 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:07 crc kubenswrapper[4718]: I1124 08:54:07.411284 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:09 crc kubenswrapper[4718]: I1124 08:54:09.424750 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:09 crc kubenswrapper[4718]: I1124 08:54:09.425349 4718 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 08:54:09 crc kubenswrapper[4718]: I1124 08:54:09.502612 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.187991 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-sync-tcnh5"] Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.192946 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-db-sync-tcnh5"] Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.292961 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.293236 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-1" podUID="94bc7b3f-a682-4f02-a7e3-efb363bf98f3" containerName="glance-log" containerID="cri-o://89cc5e8c573fe4f0234a3fd8566c04476639efb434d02259adfde1a69af995c8" gracePeriod=30 Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.293685 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-1" podUID="94bc7b3f-a682-4f02-a7e3-efb363bf98f3" containerName="glance-httpd" containerID="cri-o://ac77ece9bf98ea30734caded780705db9bd3fc98b7610deb3a7f3907d5fa5fe3" gracePeriod=30 Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.306237 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.306523 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="22f63ca4-747e-4636-be43-790841b0b0ab" containerName="glance-log" containerID="cri-o://002f676778ae015fed2d16a7b970b1f1552cef535161a5d430f07b310071a102" gracePeriod=30 Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.306900 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="22f63ca4-747e-4636-be43-790841b0b0ab" containerName="glance-httpd" containerID="cri-o://5279fb7bd59e41f189c77800fe0ed2a775436d51d8d1c097eda3075adce84469" gracePeriod=30 Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.324158 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glanceaac4-account-delete-89vvx"] Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.325092 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glanceaac4-account-delete-89vvx" Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.330075 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glanceaac4-account-delete-89vvx"] Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.405332 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0-operator-scripts\") pod \"glanceaac4-account-delete-89vvx\" (UID: \"b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0\") " pod="glance-kuttl-tests/glanceaac4-account-delete-89vvx" Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.405563 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qj4k\" (UniqueName: \"kubernetes.io/projected/b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0-kube-api-access-2qj4k\") pod \"glanceaac4-account-delete-89vvx\" (UID: \"b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0\") " pod="glance-kuttl-tests/glanceaac4-account-delete-89vvx" Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.436871 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/openstackclient"] Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.437102 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/openstackclient" podUID="a264ca86-f795-4ba2-945b-e6459a4d1e68" containerName="openstackclient" containerID="cri-o://3ea2cdc0f06352ab9bae5e7a14730ee5c0061f89de2b26e6b2bf3eeeb9a53461" gracePeriod=30 Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.507331 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0-operator-scripts\") pod \"glanceaac4-account-delete-89vvx\" (UID: \"b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0\") " pod="glance-kuttl-tests/glanceaac4-account-delete-89vvx" Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.507784 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qj4k\" (UniqueName: \"kubernetes.io/projected/b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0-kube-api-access-2qj4k\") pod \"glanceaac4-account-delete-89vvx\" (UID: \"b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0\") " pod="glance-kuttl-tests/glanceaac4-account-delete-89vvx" Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.508271 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0-operator-scripts\") pod \"glanceaac4-account-delete-89vvx\" (UID: \"b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0\") " pod="glance-kuttl-tests/glanceaac4-account-delete-89vvx" Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.516602 4718 generic.go:334] "Generic (PLEG): container finished" podID="94bc7b3f-a682-4f02-a7e3-efb363bf98f3" containerID="89cc5e8c573fe4f0234a3fd8566c04476639efb434d02259adfde1a69af995c8" exitCode=143 Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.516667 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"94bc7b3f-a682-4f02-a7e3-efb363bf98f3","Type":"ContainerDied","Data":"89cc5e8c573fe4f0234a3fd8566c04476639efb434d02259adfde1a69af995c8"} Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.518763 4718 generic.go:334] "Generic (PLEG): container 
finished" podID="22f63ca4-747e-4636-be43-790841b0b0ab" containerID="002f676778ae015fed2d16a7b970b1f1552cef535161a5d430f07b310071a102" exitCode=143 Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.518789 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"22f63ca4-747e-4636-be43-790841b0b0ab","Type":"ContainerDied","Data":"002f676778ae015fed2d16a7b970b1f1552cef535161a5d430f07b310071a102"} Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.531024 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qj4k\" (UniqueName: \"kubernetes.io/projected/b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0-kube-api-access-2qj4k\") pod \"glanceaac4-account-delete-89vvx\" (UID: \"b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0\") " pod="glance-kuttl-tests/glanceaac4-account-delete-89vvx" Nov 24 08:54:21 crc kubenswrapper[4718]: I1124 08:54:21.645539 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glanceaac4-account-delete-89vvx" Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:21.809435 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstackclient" Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:21.914869 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-276km\" (UniqueName: \"kubernetes.io/projected/a264ca86-f795-4ba2-945b-e6459a4d1e68-kube-api-access-276km\") pod \"a264ca86-f795-4ba2-945b-e6459a4d1e68\" (UID: \"a264ca86-f795-4ba2-945b-e6459a4d1e68\") " Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:21.915032 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-config\") pod \"a264ca86-f795-4ba2-945b-e6459a4d1e68\" (UID: \"a264ca86-f795-4ba2-945b-e6459a4d1e68\") " Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:21.915084 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-config-secret\") pod \"a264ca86-f795-4ba2-945b-e6459a4d1e68\" (UID: \"a264ca86-f795-4ba2-945b-e6459a4d1e68\") " Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:21.915157 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-scripts\") pod \"a264ca86-f795-4ba2-945b-e6459a4d1e68\" (UID: \"a264ca86-f795-4ba2-945b-e6459a4d1e68\") " Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:21.915901 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-scripts" (OuterVolumeSpecName: "openstack-scripts") pod "a264ca86-f795-4ba2-945b-e6459a4d1e68" (UID: "a264ca86-f795-4ba2-945b-e6459a4d1e68"). InnerVolumeSpecName "openstack-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:21.918908 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a264ca86-f795-4ba2-945b-e6459a4d1e68-kube-api-access-276km" (OuterVolumeSpecName: "kube-api-access-276km") pod "a264ca86-f795-4ba2-945b-e6459a4d1e68" (UID: "a264ca86-f795-4ba2-945b-e6459a4d1e68"). 
InnerVolumeSpecName "kube-api-access-276km". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:21.934098 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "a264ca86-f795-4ba2-945b-e6459a4d1e68" (UID: "a264ca86-f795-4ba2-945b-e6459a4d1e68"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:21.939914 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "a264ca86-f795-4ba2-945b-e6459a4d1e68" (UID: "a264ca86-f795-4ba2-945b-e6459a4d1e68"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.016660 4718 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.016694 4718 reconciler_common.go:293] "Volume detached for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.016703 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-276km\" (UniqueName: \"kubernetes.io/projected/a264ca86-f795-4ba2-945b-e6459a4d1e68-kube-api-access-276km\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.016711 4718 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a264ca86-f795-4ba2-945b-e6459a4d1e68-openstack-config\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.044867 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.044931 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.526854 4718 generic.go:334] "Generic (PLEG): container finished" podID="a264ca86-f795-4ba2-945b-e6459a4d1e68" containerID="3ea2cdc0f06352ab9bae5e7a14730ee5c0061f89de2b26e6b2bf3eeeb9a53461" exitCode=143 Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.526913 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/openstackclient" Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.526906 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstackclient" event={"ID":"a264ca86-f795-4ba2-945b-e6459a4d1e68","Type":"ContainerDied","Data":"3ea2cdc0f06352ab9bae5e7a14730ee5c0061f89de2b26e6b2bf3eeeb9a53461"} Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.527060 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstackclient" event={"ID":"a264ca86-f795-4ba2-945b-e6459a4d1e68","Type":"ContainerDied","Data":"d21bfe1fb23b81f17ae3777b6ac7d57fe95e3fce7d3e06490c08ca5497b663a7"} Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.527084 4718 scope.go:117] "RemoveContainer" containerID="3ea2cdc0f06352ab9bae5e7a14730ee5c0061f89de2b26e6b2bf3eeeb9a53461" Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.557050 4718 scope.go:117] "RemoveContainer" containerID="3ea2cdc0f06352ab9bae5e7a14730ee5c0061f89de2b26e6b2bf3eeeb9a53461" Nov 24 08:54:22 crc kubenswrapper[4718]: E1124 08:54:22.557891 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ea2cdc0f06352ab9bae5e7a14730ee5c0061f89de2b26e6b2bf3eeeb9a53461\": container with ID starting with 3ea2cdc0f06352ab9bae5e7a14730ee5c0061f89de2b26e6b2bf3eeeb9a53461 not found: ID does not exist" containerID="3ea2cdc0f06352ab9bae5e7a14730ee5c0061f89de2b26e6b2bf3eeeb9a53461" Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.558014 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ea2cdc0f06352ab9bae5e7a14730ee5c0061f89de2b26e6b2bf3eeeb9a53461"} err="failed to get container status \"3ea2cdc0f06352ab9bae5e7a14730ee5c0061f89de2b26e6b2bf3eeeb9a53461\": rpc error: code = NotFound desc = could not find container \"3ea2cdc0f06352ab9bae5e7a14730ee5c0061f89de2b26e6b2bf3eeeb9a53461\": container with ID starting with 3ea2cdc0f06352ab9bae5e7a14730ee5c0061f89de2b26e6b2bf3eeeb9a53461 not found: ID does not exist" Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.562585 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/openstackclient"] Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.569340 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/openstackclient"] Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.610266 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c" path="/var/lib/kubelet/pods/5daf94d4-6c69-4b5d-a0e9-ded6ee79cf4c/volumes" Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.611219 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a264ca86-f795-4ba2-945b-e6459a4d1e68" path="/var/lib/kubelet/pods/a264ca86-f795-4ba2-945b-e6459a4d1e68/volumes" Nov 24 08:54:22 crc kubenswrapper[4718]: I1124 08:54:22.611926 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glanceaac4-account-delete-89vvx"] Nov 24 08:54:23 crc kubenswrapper[4718]: I1124 08:54:23.537097 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glanceaac4-account-delete-89vvx" event={"ID":"b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0","Type":"ContainerDied","Data":"1bfb3ee2b066050dfefc5309b07ae571d8fa0faccf565ad77ce8b390f879324c"} Nov 24 08:54:23 crc kubenswrapper[4718]: I1124 08:54:23.537799 4718 generic.go:334] "Generic (PLEG): container finished" 
podID="b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0" containerID="1bfb3ee2b066050dfefc5309b07ae571d8fa0faccf565ad77ce8b390f879324c" exitCode=0 Nov 24 08:54:23 crc kubenswrapper[4718]: I1124 08:54:23.538084 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glanceaac4-account-delete-89vvx" event={"ID":"b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0","Type":"ContainerStarted","Data":"b41537269ac6ba2cf3490b0d2d517d3316552300d941ebf8752c326a649438cf"} Nov 24 08:54:24 crc kubenswrapper[4718]: I1124 08:54:24.562304 4718 generic.go:334] "Generic (PLEG): container finished" podID="22f63ca4-747e-4636-be43-790841b0b0ab" containerID="5279fb7bd59e41f189c77800fe0ed2a775436d51d8d1c097eda3075adce84469" exitCode=0 Nov 24 08:54:24 crc kubenswrapper[4718]: I1124 08:54:24.562503 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"22f63ca4-747e-4636-be43-790841b0b0ab","Type":"ContainerDied","Data":"5279fb7bd59e41f189c77800fe0ed2a775436d51d8d1c097eda3075adce84469"} Nov 24 08:54:24 crc kubenswrapper[4718]: I1124 08:54:24.568752 4718 generic.go:334] "Generic (PLEG): container finished" podID="94bc7b3f-a682-4f02-a7e3-efb363bf98f3" containerID="ac77ece9bf98ea30734caded780705db9bd3fc98b7610deb3a7f3907d5fa5fe3" exitCode=0 Nov 24 08:54:24 crc kubenswrapper[4718]: I1124 08:54:24.568822 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"94bc7b3f-a682-4f02-a7e3-efb363bf98f3","Type":"ContainerDied","Data":"ac77ece9bf98ea30734caded780705db9bd3fc98b7610deb3a7f3907d5fa5fe3"} Nov 24 08:54:24 crc kubenswrapper[4718]: I1124 08:54:24.978398 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:54:24 crc kubenswrapper[4718]: I1124 08:54:24.984174 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glanceaac4-account-delete-89vvx" Nov 24 08:54:24 crc kubenswrapper[4718]: I1124 08:54:24.987944 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.063783 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-var-locks-brick\") pod \"22f63ca4-747e-4636-be43-790841b0b0ab\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.063835 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-etc-nvme\") pod \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.063856 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-lib-modules\") pod \"22f63ca4-747e-4636-be43-790841b0b0ab\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.063886 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0-operator-scripts\") pod \"b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0\" (UID: \"b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.063919 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"22f63ca4-747e-4636-be43-790841b0b0ab\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.063942 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-etc-iscsi\") pod \"22f63ca4-747e-4636-be43-790841b0b0ab\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.063961 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-dev\") pod \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064001 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-run\") pod \"22f63ca4-747e-4636-be43-790841b0b0ab\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064031 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-httpd-run\") pod \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064057 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22f63ca4-747e-4636-be43-790841b0b0ab-config-data\") pod \"22f63ca4-747e-4636-be43-790841b0b0ab\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 
08:54:25.064079 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-sys\") pod \"22f63ca4-747e-4636-be43-790841b0b0ab\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064111 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-sys\") pod \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064129 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-etc-iscsi\") pod \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064157 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-scripts\") pod \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064187 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-dev\") pod \"22f63ca4-747e-4636-be43-790841b0b0ab\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064207 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-etc-nvme\") pod \"22f63ca4-747e-4636-be43-790841b0b0ab\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064240 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064260 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/22f63ca4-747e-4636-be43-790841b0b0ab-httpd-run\") pod \"22f63ca4-747e-4636-be43-790841b0b0ab\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064284 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"22f63ca4-747e-4636-be43-790841b0b0ab\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064301 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-run\") pod \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064324 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2nzd\" (UniqueName: 
\"kubernetes.io/projected/22f63ca4-747e-4636-be43-790841b0b0ab-kube-api-access-l2nzd\") pod \"22f63ca4-747e-4636-be43-790841b0b0ab\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064352 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-config-data\") pod \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064522 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-logs\") pod \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064549 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-sys" (OuterVolumeSpecName: "sys") pod "22f63ca4-747e-4636-be43-790841b0b0ab" (UID: "22f63ca4-747e-4636-be43-790841b0b0ab"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064554 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pg4p9\" (UniqueName: \"kubernetes.io/projected/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-kube-api-access-pg4p9\") pod \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064617 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064651 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22f63ca4-747e-4636-be43-790841b0b0ab-logs\") pod \"22f63ca4-747e-4636-be43-790841b0b0ab\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064673 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-var-locks-brick\") pod \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064714 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22f63ca4-747e-4636-be43-790841b0b0ab-scripts\") pod \"22f63ca4-747e-4636-be43-790841b0b0ab\" (UID: \"22f63ca4-747e-4636-be43-790841b0b0ab\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064763 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-lib-modules\") pod \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\" (UID: \"94bc7b3f-a682-4f02-a7e3-efb363bf98f3\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064801 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qj4k\" (UniqueName: 
\"kubernetes.io/projected/b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0-kube-api-access-2qj4k\") pod \"b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0\" (UID: \"b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0\") " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064877 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-dev" (OuterVolumeSpecName: "dev") pod "94bc7b3f-a682-4f02-a7e3-efb363bf98f3" (UID: "94bc7b3f-a682-4f02-a7e3-efb363bf98f3"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.064958 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "22f63ca4-747e-4636-be43-790841b0b0ab" (UID: "22f63ca4-747e-4636-be43-790841b0b0ab"). InnerVolumeSpecName "var-locks-brick". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.065316 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22f63ca4-747e-4636-be43-790841b0b0ab-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "22f63ca4-747e-4636-be43-790841b0b0ab" (UID: "22f63ca4-747e-4636-be43-790841b0b0ab"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.065384 4718 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-sys\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.065492 4718 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-var-locks-brick\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.065550 4718 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-dev\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.065413 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "94bc7b3f-a682-4f02-a7e3-efb363bf98f3" (UID: "94bc7b3f-a682-4f02-a7e3-efb363bf98f3"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.065433 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "22f63ca4-747e-4636-be43-790841b0b0ab" (UID: "22f63ca4-747e-4636-be43-790841b0b0ab"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.065553 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-run" (OuterVolumeSpecName: "run") pod "94bc7b3f-a682-4f02-a7e3-efb363bf98f3" (UID: "94bc7b3f-a682-4f02-a7e3-efb363bf98f3"). InnerVolumeSpecName "run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.065777 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "22f63ca4-747e-4636-be43-790841b0b0ab" (UID: "22f63ca4-747e-4636-be43-790841b0b0ab"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.065739 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-sys" (OuterVolumeSpecName: "sys") pod "94bc7b3f-a682-4f02-a7e3-efb363bf98f3" (UID: "94bc7b3f-a682-4f02-a7e3-efb363bf98f3"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.065925 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-dev" (OuterVolumeSpecName: "dev") pod "22f63ca4-747e-4636-be43-790841b0b0ab" (UID: "22f63ca4-747e-4636-be43-790841b0b0ab"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.066015 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "22f63ca4-747e-4636-be43-790841b0b0ab" (UID: "22f63ca4-747e-4636-be43-790841b0b0ab"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.066037 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-logs" (OuterVolumeSpecName: "logs") pod "94bc7b3f-a682-4f02-a7e3-efb363bf98f3" (UID: "94bc7b3f-a682-4f02-a7e3-efb363bf98f3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.066166 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "94bc7b3f-a682-4f02-a7e3-efb363bf98f3" (UID: "94bc7b3f-a682-4f02-a7e3-efb363bf98f3"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.066209 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "94bc7b3f-a682-4f02-a7e3-efb363bf98f3" (UID: "94bc7b3f-a682-4f02-a7e3-efb363bf98f3"). InnerVolumeSpecName "var-locks-brick". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.066649 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22f63ca4-747e-4636-be43-790841b0b0ab-logs" (OuterVolumeSpecName: "logs") pod "22f63ca4-747e-4636-be43-790841b0b0ab" (UID: "22f63ca4-747e-4636-be43-790841b0b0ab"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.067098 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0" (UID: "b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.067129 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "94bc7b3f-a682-4f02-a7e3-efb363bf98f3" (UID: "94bc7b3f-a682-4f02-a7e3-efb363bf98f3"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.067153 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "94bc7b3f-a682-4f02-a7e3-efb363bf98f3" (UID: "94bc7b3f-a682-4f02-a7e3-efb363bf98f3"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.067165 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-run" (OuterVolumeSpecName: "run") pod "22f63ca4-747e-4636-be43-790841b0b0ab" (UID: "22f63ca4-747e-4636-be43-790841b0b0ab"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.071602 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance-cache") pod "94bc7b3f-a682-4f02-a7e3-efb363bf98f3" (UID: "94bc7b3f-a682-4f02-a7e3-efb363bf98f3"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.072098 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-kube-api-access-pg4p9" (OuterVolumeSpecName: "kube-api-access-pg4p9") pod "94bc7b3f-a682-4f02-a7e3-efb363bf98f3" (UID: "94bc7b3f-a682-4f02-a7e3-efb363bf98f3"). InnerVolumeSpecName "kube-api-access-pg4p9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.072256 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0-kube-api-access-2qj4k" (OuterVolumeSpecName: "kube-api-access-2qj4k") pod "b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0" (UID: "b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0"). InnerVolumeSpecName "kube-api-access-2qj4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.072582 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "94bc7b3f-a682-4f02-a7e3-efb363bf98f3" (UID: "94bc7b3f-a682-4f02-a7e3-efb363bf98f3"). InnerVolumeSpecName "local-storage04-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.072775 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "22f63ca4-747e-4636-be43-790841b0b0ab" (UID: "22f63ca4-747e-4636-be43-790841b0b0ab"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.073658 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22f63ca4-747e-4636-be43-790841b0b0ab-kube-api-access-l2nzd" (OuterVolumeSpecName: "kube-api-access-l2nzd") pod "22f63ca4-747e-4636-be43-790841b0b0ab" (UID: "22f63ca4-747e-4636-be43-790841b0b0ab"). InnerVolumeSpecName "kube-api-access-l2nzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.074482 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-scripts" (OuterVolumeSpecName: "scripts") pod "94bc7b3f-a682-4f02-a7e3-efb363bf98f3" (UID: "94bc7b3f-a682-4f02-a7e3-efb363bf98f3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.074514 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22f63ca4-747e-4636-be43-790841b0b0ab-scripts" (OuterVolumeSpecName: "scripts") pod "22f63ca4-747e-4636-be43-790841b0b0ab" (UID: "22f63ca4-747e-4636-be43-790841b0b0ab"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.075357 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance-cache") pod "22f63ca4-747e-4636-be43-790841b0b0ab" (UID: "22f63ca4-747e-4636-be43-790841b0b0ab"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.106264 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-config-data" (OuterVolumeSpecName: "config-data") pod "94bc7b3f-a682-4f02-a7e3-efb363bf98f3" (UID: "94bc7b3f-a682-4f02-a7e3-efb363bf98f3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.112651 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22f63ca4-747e-4636-be43-790841b0b0ab-config-data" (OuterVolumeSpecName: "config-data") pod "22f63ca4-747e-4636-be43-790841b0b0ab" (UID: "22f63ca4-747e-4636-be43-790841b0b0ab"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.166776 4718 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-etc-nvme\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.166815 4718 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-lib-modules\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.166824 4718 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.166860 4718 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.166870 4718 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-etc-iscsi\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.166881 4718 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-run\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.166889 4718 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.166896 4718 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22f63ca4-747e-4636-be43-790841b0b0ab-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.166904 4718 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-sys\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.166912 4718 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-etc-iscsi\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.166920 4718 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.166930 4718 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-dev\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.166937 4718 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/22f63ca4-747e-4636-be43-790841b0b0ab-etc-nvme\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.166953 4718 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.166961 4718 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/22f63ca4-747e-4636-be43-790841b0b0ab-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.166987 4718 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.166995 4718 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-run\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.167003 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2nzd\" (UniqueName: \"kubernetes.io/projected/22f63ca4-747e-4636-be43-790841b0b0ab-kube-api-access-l2nzd\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.167013 4718 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.167021 4718 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-logs\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.167028 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pg4p9\" (UniqueName: \"kubernetes.io/projected/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-kube-api-access-pg4p9\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.167045 4718 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.167056 4718 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22f63ca4-747e-4636-be43-790841b0b0ab-logs\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.167063 4718 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-var-locks-brick\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.167071 4718 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22f63ca4-747e-4636-be43-790841b0b0ab-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.167078 4718 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/94bc7b3f-a682-4f02-a7e3-efb363bf98f3-lib-modules\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.167087 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qj4k\" (UniqueName: \"kubernetes.io/projected/b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0-kube-api-access-2qj4k\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.180694 4718 
operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.180853 4718 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.181137 4718 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.181683 4718 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.268385 4718 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.268416 4718 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.268425 4718 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.268434 4718 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.578118 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.578716 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"94bc7b3f-a682-4f02-a7e3-efb363bf98f3","Type":"ContainerDied","Data":"a8d2d25ec07af4dc2b89e88e70accd191d93df97253a3298c233cfb58c3f51b4"} Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.578829 4718 scope.go:117] "RemoveContainer" containerID="ac77ece9bf98ea30734caded780705db9bd3fc98b7610deb3a7f3907d5fa5fe3" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.581520 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"22f63ca4-747e-4636-be43-790841b0b0ab","Type":"ContainerDied","Data":"c11f47fdda6eceb095b7daede75a0795c6de7ad04e24a01383615849f5451699"} Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.581641 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.585197 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glanceaac4-account-delete-89vvx" event={"ID":"b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0","Type":"ContainerDied","Data":"b41537269ac6ba2cf3490b0d2d517d3316552300d941ebf8752c326a649438cf"} Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.585244 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b41537269ac6ba2cf3490b0d2d517d3316552300d941ebf8752c326a649438cf" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.585335 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glanceaac4-account-delete-89vvx" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.610676 4718 scope.go:117] "RemoveContainer" containerID="89cc5e8c573fe4f0234a3fd8566c04476639efb434d02259adfde1a69af995c8" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.616039 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.623221 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.630111 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.634992 4718 scope.go:117] "RemoveContainer" containerID="5279fb7bd59e41f189c77800fe0ed2a775436d51d8d1c097eda3075adce84469" Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.637219 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:54:25 crc kubenswrapper[4718]: I1124 08:54:25.652436 4718 scope.go:117] "RemoveContainer" containerID="002f676778ae015fed2d16a7b970b1f1552cef535161a5d430f07b310071a102" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.287023 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-create-xxr4t"] Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.292134 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-db-create-xxr4t"] Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.299569 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-aac4-account-create-update-plrtk"] Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.304720 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glanceaac4-account-delete-89vvx"] Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.311208 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-aac4-account-create-update-plrtk"] Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.316373 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glanceaac4-account-delete-89vvx"] Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.383147 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-create-wvjhf"] Nov 24 08:54:26 crc kubenswrapper[4718]: E1124 08:54:26.383467 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94bc7b3f-a682-4f02-a7e3-efb363bf98f3" containerName="glance-httpd" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.383491 4718 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="94bc7b3f-a682-4f02-a7e3-efb363bf98f3" containerName="glance-httpd" Nov 24 08:54:26 crc kubenswrapper[4718]: E1124 08:54:26.383504 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0" containerName="mariadb-account-delete" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.383513 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0" containerName="mariadb-account-delete" Nov 24 08:54:26 crc kubenswrapper[4718]: E1124 08:54:26.383532 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22f63ca4-747e-4636-be43-790841b0b0ab" containerName="glance-httpd" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.383540 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="22f63ca4-747e-4636-be43-790841b0b0ab" containerName="glance-httpd" Nov 24 08:54:26 crc kubenswrapper[4718]: E1124 08:54:26.383554 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a264ca86-f795-4ba2-945b-e6459a4d1e68" containerName="openstackclient" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.383562 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="a264ca86-f795-4ba2-945b-e6459a4d1e68" containerName="openstackclient" Nov 24 08:54:26 crc kubenswrapper[4718]: E1124 08:54:26.383576 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22f63ca4-747e-4636-be43-790841b0b0ab" containerName="glance-log" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.383583 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="22f63ca4-747e-4636-be43-790841b0b0ab" containerName="glance-log" Nov 24 08:54:26 crc kubenswrapper[4718]: E1124 08:54:26.383591 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94bc7b3f-a682-4f02-a7e3-efb363bf98f3" containerName="glance-log" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.383598 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="94bc7b3f-a682-4f02-a7e3-efb363bf98f3" containerName="glance-log" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.383750 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="a264ca86-f795-4ba2-945b-e6459a4d1e68" containerName="openstackclient" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.383763 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="22f63ca4-747e-4636-be43-790841b0b0ab" containerName="glance-httpd" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.383776 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0" containerName="mariadb-account-delete" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.383788 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="94bc7b3f-a682-4f02-a7e3-efb363bf98f3" containerName="glance-httpd" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.383801 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="94bc7b3f-a682-4f02-a7e3-efb363bf98f3" containerName="glance-log" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.383819 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="22f63ca4-747e-4636-be43-790841b0b0ab" containerName="glance-log" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.384494 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-create-wvjhf" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.390254 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-wvjhf"] Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.487267 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d098245-5f65-4b2d-a00a-6c3cc6c954e4-operator-scripts\") pod \"glance-db-create-wvjhf\" (UID: \"2d098245-5f65-4b2d-a00a-6c3cc6c954e4\") " pod="glance-kuttl-tests/glance-db-create-wvjhf" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.488230 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxsm8\" (UniqueName: \"kubernetes.io/projected/2d098245-5f65-4b2d-a00a-6c3cc6c954e4-kube-api-access-zxsm8\") pod \"glance-db-create-wvjhf\" (UID: \"2d098245-5f65-4b2d-a00a-6c3cc6c954e4\") " pod="glance-kuttl-tests/glance-db-create-wvjhf" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.492290 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-f30c-account-create-update-dh9lq"] Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.493371 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-f30c-account-create-update-dh9lq" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.497729 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-db-secret" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.501452 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-f30c-account-create-update-dh9lq"] Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.590076 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d098245-5f65-4b2d-a00a-6c3cc6c954e4-operator-scripts\") pod \"glance-db-create-wvjhf\" (UID: \"2d098245-5f65-4b2d-a00a-6c3cc6c954e4\") " pod="glance-kuttl-tests/glance-db-create-wvjhf" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.590141 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxsm8\" (UniqueName: \"kubernetes.io/projected/2d098245-5f65-4b2d-a00a-6c3cc6c954e4-kube-api-access-zxsm8\") pod \"glance-db-create-wvjhf\" (UID: \"2d098245-5f65-4b2d-a00a-6c3cc6c954e4\") " pod="glance-kuttl-tests/glance-db-create-wvjhf" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.590197 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6lb7\" (UniqueName: \"kubernetes.io/projected/fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5-kube-api-access-l6lb7\") pod \"glance-f30c-account-create-update-dh9lq\" (UID: \"fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5\") " pod="glance-kuttl-tests/glance-f30c-account-create-update-dh9lq" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.590230 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5-operator-scripts\") pod \"glance-f30c-account-create-update-dh9lq\" (UID: \"fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5\") " pod="glance-kuttl-tests/glance-f30c-account-create-update-dh9lq" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 
08:54:26.591002 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d098245-5f65-4b2d-a00a-6c3cc6c954e4-operator-scripts\") pod \"glance-db-create-wvjhf\" (UID: \"2d098245-5f65-4b2d-a00a-6c3cc6c954e4\") " pod="glance-kuttl-tests/glance-db-create-wvjhf" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.604885 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22f63ca4-747e-4636-be43-790841b0b0ab" path="/var/lib/kubelet/pods/22f63ca4-747e-4636-be43-790841b0b0ab/volumes" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.605594 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="485cca71-1560-4c07-b6eb-2139ec7c4e97" path="/var/lib/kubelet/pods/485cca71-1560-4c07-b6eb-2139ec7c4e97/volumes" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.606661 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94bc7b3f-a682-4f02-a7e3-efb363bf98f3" path="/var/lib/kubelet/pods/94bc7b3f-a682-4f02-a7e3-efb363bf98f3/volumes" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.607316 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c853b5f-821e-4082-a3d4-3d914c6ef527" path="/var/lib/kubelet/pods/9c853b5f-821e-4082-a3d4-3d914c6ef527/volumes" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.607824 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0" path="/var/lib/kubelet/pods/b16c2eb1-1c3c-4b2e-ac79-cc1678ed74e0/volumes" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.611039 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxsm8\" (UniqueName: \"kubernetes.io/projected/2d098245-5f65-4b2d-a00a-6c3cc6c954e4-kube-api-access-zxsm8\") pod \"glance-db-create-wvjhf\" (UID: \"2d098245-5f65-4b2d-a00a-6c3cc6c954e4\") " pod="glance-kuttl-tests/glance-db-create-wvjhf" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.691857 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5-operator-scripts\") pod \"glance-f30c-account-create-update-dh9lq\" (UID: \"fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5\") " pod="glance-kuttl-tests/glance-f30c-account-create-update-dh9lq" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.692020 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6lb7\" (UniqueName: \"kubernetes.io/projected/fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5-kube-api-access-l6lb7\") pod \"glance-f30c-account-create-update-dh9lq\" (UID: \"fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5\") " pod="glance-kuttl-tests/glance-f30c-account-create-update-dh9lq" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.692589 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5-operator-scripts\") pod \"glance-f30c-account-create-update-dh9lq\" (UID: \"fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5\") " pod="glance-kuttl-tests/glance-f30c-account-create-update-dh9lq" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.709193 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6lb7\" (UniqueName: \"kubernetes.io/projected/fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5-kube-api-access-l6lb7\") pod 
\"glance-f30c-account-create-update-dh9lq\" (UID: \"fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5\") " pod="glance-kuttl-tests/glance-f30c-account-create-update-dh9lq" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.744582 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-wvjhf" Nov 24 08:54:26 crc kubenswrapper[4718]: I1124 08:54:26.820330 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-f30c-account-create-update-dh9lq" Nov 24 08:54:27 crc kubenswrapper[4718]: I1124 08:54:27.162817 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-wvjhf"] Nov 24 08:54:27 crc kubenswrapper[4718]: W1124 08:54:27.167823 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d098245_5f65_4b2d_a00a_6c3cc6c954e4.slice/crio-2764d5a852c0f9e4824828a51e2a45884764a7560bebd0b9a4e45ba2a0c0a275 WatchSource:0}: Error finding container 2764d5a852c0f9e4824828a51e2a45884764a7560bebd0b9a4e45ba2a0c0a275: Status 404 returned error can't find the container with id 2764d5a852c0f9e4824828a51e2a45884764a7560bebd0b9a4e45ba2a0c0a275 Nov 24 08:54:27 crc kubenswrapper[4718]: I1124 08:54:27.238462 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-f30c-account-create-update-dh9lq"] Nov 24 08:54:27 crc kubenswrapper[4718]: I1124 08:54:27.605509 4718 generic.go:334] "Generic (PLEG): container finished" podID="fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5" containerID="556cbcc6dcc6c37976558b25b353deea6ebc6ae6f5747684123c6a33da074b0e" exitCode=0 Nov 24 08:54:27 crc kubenswrapper[4718]: I1124 08:54:27.605580 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-f30c-account-create-update-dh9lq" event={"ID":"fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5","Type":"ContainerDied","Data":"556cbcc6dcc6c37976558b25b353deea6ebc6ae6f5747684123c6a33da074b0e"} Nov 24 08:54:27 crc kubenswrapper[4718]: I1124 08:54:27.605936 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-f30c-account-create-update-dh9lq" event={"ID":"fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5","Type":"ContainerStarted","Data":"a280272421c8161d9803c7a43fab9af6fd1a6332a9a3c310c1714a5ad47d7f11"} Nov 24 08:54:27 crc kubenswrapper[4718]: I1124 08:54:27.607929 4718 generic.go:334] "Generic (PLEG): container finished" podID="2d098245-5f65-4b2d-a00a-6c3cc6c954e4" containerID="b75a3576e90bc6c8d89e44520da9cd75e99bf98c2b18b0ac9c3b1dbadfc061ba" exitCode=0 Nov 24 08:54:27 crc kubenswrapper[4718]: I1124 08:54:27.607954 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-wvjhf" event={"ID":"2d098245-5f65-4b2d-a00a-6c3cc6c954e4","Type":"ContainerDied","Data":"b75a3576e90bc6c8d89e44520da9cd75e99bf98c2b18b0ac9c3b1dbadfc061ba"} Nov 24 08:54:27 crc kubenswrapper[4718]: I1124 08:54:27.607985 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-wvjhf" event={"ID":"2d098245-5f65-4b2d-a00a-6c3cc6c954e4","Type":"ContainerStarted","Data":"2764d5a852c0f9e4824828a51e2a45884764a7560bebd0b9a4e45ba2a0c0a275"} Nov 24 08:54:28 crc kubenswrapper[4718]: I1124 08:54:28.953511 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-f30c-account-create-update-dh9lq" Nov 24 08:54:28 crc kubenswrapper[4718]: I1124 08:54:28.959791 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-wvjhf" Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.029382 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d098245-5f65-4b2d-a00a-6c3cc6c954e4-operator-scripts\") pod \"2d098245-5f65-4b2d-a00a-6c3cc6c954e4\" (UID: \"2d098245-5f65-4b2d-a00a-6c3cc6c954e4\") " Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.029478 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5-operator-scripts\") pod \"fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5\" (UID: \"fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5\") " Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.029518 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxsm8\" (UniqueName: \"kubernetes.io/projected/2d098245-5f65-4b2d-a00a-6c3cc6c954e4-kube-api-access-zxsm8\") pod \"2d098245-5f65-4b2d-a00a-6c3cc6c954e4\" (UID: \"2d098245-5f65-4b2d-a00a-6c3cc6c954e4\") " Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.029607 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6lb7\" (UniqueName: \"kubernetes.io/projected/fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5-kube-api-access-l6lb7\") pod \"fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5\" (UID: \"fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5\") " Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.029915 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5" (UID: "fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.029942 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d098245-5f65-4b2d-a00a-6c3cc6c954e4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2d098245-5f65-4b2d-a00a-6c3cc6c954e4" (UID: "2d098245-5f65-4b2d-a00a-6c3cc6c954e4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.036797 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5-kube-api-access-l6lb7" (OuterVolumeSpecName: "kube-api-access-l6lb7") pod "fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5" (UID: "fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5"). InnerVolumeSpecName "kube-api-access-l6lb7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.038706 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d098245-5f65-4b2d-a00a-6c3cc6c954e4-kube-api-access-zxsm8" (OuterVolumeSpecName: "kube-api-access-zxsm8") pod "2d098245-5f65-4b2d-a00a-6c3cc6c954e4" (UID: "2d098245-5f65-4b2d-a00a-6c3cc6c954e4"). InnerVolumeSpecName "kube-api-access-zxsm8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.131076 4718 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.131113 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxsm8\" (UniqueName: \"kubernetes.io/projected/2d098245-5f65-4b2d-a00a-6c3cc6c954e4-kube-api-access-zxsm8\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.131125 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6lb7\" (UniqueName: \"kubernetes.io/projected/fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5-kube-api-access-l6lb7\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.131134 4718 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d098245-5f65-4b2d-a00a-6c3cc6c954e4-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.629616 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-wvjhf" event={"ID":"2d098245-5f65-4b2d-a00a-6c3cc6c954e4","Type":"ContainerDied","Data":"2764d5a852c0f9e4824828a51e2a45884764a7560bebd0b9a4e45ba2a0c0a275"} Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.629664 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2764d5a852c0f9e4824828a51e2a45884764a7560bebd0b9a4e45ba2a0c0a275" Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.629660 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-wvjhf" Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.630889 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-f30c-account-create-update-dh9lq" event={"ID":"fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5","Type":"ContainerDied","Data":"a280272421c8161d9803c7a43fab9af6fd1a6332a9a3c310c1714a5ad47d7f11"} Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.630915 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a280272421c8161d9803c7a43fab9af6fd1a6332a9a3c310c1714a5ad47d7f11" Nov 24 08:54:29 crc kubenswrapper[4718]: I1124 08:54:29.630954 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-f30c-account-create-update-dh9lq" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.667016 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-sync-5zrsx"] Nov 24 08:54:31 crc kubenswrapper[4718]: E1124 08:54:31.668422 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d098245-5f65-4b2d-a00a-6c3cc6c954e4" containerName="mariadb-database-create" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.668510 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d098245-5f65-4b2d-a00a-6c3cc6c954e4" containerName="mariadb-database-create" Nov 24 08:54:31 crc kubenswrapper[4718]: E1124 08:54:31.668584 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5" containerName="mariadb-account-create-update" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.668642 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5" containerName="mariadb-account-create-update" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.668828 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5" containerName="mariadb-account-create-update" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.668882 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d098245-5f65-4b2d-a00a-6c3cc6c954e4" containerName="mariadb-database-create" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.669444 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-5zrsx" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.671275 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-config-data" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.671483 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-zvq88" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.671517 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"combined-ca-bundle" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.681624 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-5zrsx"] Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.769278 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-db-sync-config-data\") pod \"glance-db-sync-5zrsx\" (UID: \"5d7425d6-7136-4893-8476-cec35b856f17\") " pod="glance-kuttl-tests/glance-db-sync-5zrsx" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.769326 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-combined-ca-bundle\") pod \"glance-db-sync-5zrsx\" (UID: \"5d7425d6-7136-4893-8476-cec35b856f17\") " pod="glance-kuttl-tests/glance-db-sync-5zrsx" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.769363 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-config-data\") pod \"glance-db-sync-5zrsx\" (UID: \"5d7425d6-7136-4893-8476-cec35b856f17\") " 
pod="glance-kuttl-tests/glance-db-sync-5zrsx" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.769431 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sz8tg\" (UniqueName: \"kubernetes.io/projected/5d7425d6-7136-4893-8476-cec35b856f17-kube-api-access-sz8tg\") pod \"glance-db-sync-5zrsx\" (UID: \"5d7425d6-7136-4893-8476-cec35b856f17\") " pod="glance-kuttl-tests/glance-db-sync-5zrsx" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.870860 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-db-sync-config-data\") pod \"glance-db-sync-5zrsx\" (UID: \"5d7425d6-7136-4893-8476-cec35b856f17\") " pod="glance-kuttl-tests/glance-db-sync-5zrsx" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.870913 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-combined-ca-bundle\") pod \"glance-db-sync-5zrsx\" (UID: \"5d7425d6-7136-4893-8476-cec35b856f17\") " pod="glance-kuttl-tests/glance-db-sync-5zrsx" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.870943 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-config-data\") pod \"glance-db-sync-5zrsx\" (UID: \"5d7425d6-7136-4893-8476-cec35b856f17\") " pod="glance-kuttl-tests/glance-db-sync-5zrsx" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.871016 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sz8tg\" (UniqueName: \"kubernetes.io/projected/5d7425d6-7136-4893-8476-cec35b856f17-kube-api-access-sz8tg\") pod \"glance-db-sync-5zrsx\" (UID: \"5d7425d6-7136-4893-8476-cec35b856f17\") " pod="glance-kuttl-tests/glance-db-sync-5zrsx" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.876830 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-combined-ca-bundle\") pod \"glance-db-sync-5zrsx\" (UID: \"5d7425d6-7136-4893-8476-cec35b856f17\") " pod="glance-kuttl-tests/glance-db-sync-5zrsx" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.877319 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-db-sync-config-data\") pod \"glance-db-sync-5zrsx\" (UID: \"5d7425d6-7136-4893-8476-cec35b856f17\") " pod="glance-kuttl-tests/glance-db-sync-5zrsx" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.887578 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-config-data\") pod \"glance-db-sync-5zrsx\" (UID: \"5d7425d6-7136-4893-8476-cec35b856f17\") " pod="glance-kuttl-tests/glance-db-sync-5zrsx" Nov 24 08:54:31 crc kubenswrapper[4718]: I1124 08:54:31.894476 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sz8tg\" (UniqueName: \"kubernetes.io/projected/5d7425d6-7136-4893-8476-cec35b856f17-kube-api-access-sz8tg\") pod \"glance-db-sync-5zrsx\" (UID: \"5d7425d6-7136-4893-8476-cec35b856f17\") " pod="glance-kuttl-tests/glance-db-sync-5zrsx" Nov 24 08:54:31 crc 
kubenswrapper[4718]: I1124 08:54:31.994196 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-5zrsx" Nov 24 08:54:32 crc kubenswrapper[4718]: I1124 08:54:32.506860 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-5zrsx"] Nov 24 08:54:32 crc kubenswrapper[4718]: I1124 08:54:32.649960 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-5zrsx" event={"ID":"5d7425d6-7136-4893-8476-cec35b856f17","Type":"ContainerStarted","Data":"6887a13ac71071cdd470aafc989e0280eae19f970924a31084c770ab0255ae89"} Nov 24 08:54:33 crc kubenswrapper[4718]: I1124 08:54:33.659456 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-5zrsx" event={"ID":"5d7425d6-7136-4893-8476-cec35b856f17","Type":"ContainerStarted","Data":"10598ea74b1401763667babf28b7f0c3618557d8c8083d034f3ed5e0843fc6ad"} Nov 24 08:54:33 crc kubenswrapper[4718]: I1124 08:54:33.676045 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-db-sync-5zrsx" podStartSLOduration=2.676028379 podStartE2EDuration="2.676028379s" podCreationTimestamp="2025-11-24 08:54:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:54:33.671768984 +0000 UTC m=+1145.788059898" watchObservedRunningTime="2025-11-24 08:54:33.676028379 +0000 UTC m=+1145.792319303" Nov 24 08:54:36 crc kubenswrapper[4718]: I1124 08:54:36.690505 4718 generic.go:334] "Generic (PLEG): container finished" podID="5d7425d6-7136-4893-8476-cec35b856f17" containerID="10598ea74b1401763667babf28b7f0c3618557d8c8083d034f3ed5e0843fc6ad" exitCode=0 Nov 24 08:54:36 crc kubenswrapper[4718]: I1124 08:54:36.690572 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-5zrsx" event={"ID":"5d7425d6-7136-4893-8476-cec35b856f17","Type":"ContainerDied","Data":"10598ea74b1401763667babf28b7f0c3618557d8c8083d034f3ed5e0843fc6ad"} Nov 24 08:54:37 crc kubenswrapper[4718]: I1124 08:54:37.971575 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-5zrsx" Nov 24 08:54:38 crc kubenswrapper[4718]: I1124 08:54:38.168671 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sz8tg\" (UniqueName: \"kubernetes.io/projected/5d7425d6-7136-4893-8476-cec35b856f17-kube-api-access-sz8tg\") pod \"5d7425d6-7136-4893-8476-cec35b856f17\" (UID: \"5d7425d6-7136-4893-8476-cec35b856f17\") " Nov 24 08:54:38 crc kubenswrapper[4718]: I1124 08:54:38.168860 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-combined-ca-bundle\") pod \"5d7425d6-7136-4893-8476-cec35b856f17\" (UID: \"5d7425d6-7136-4893-8476-cec35b856f17\") " Nov 24 08:54:38 crc kubenswrapper[4718]: I1124 08:54:38.169092 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-config-data\") pod \"5d7425d6-7136-4893-8476-cec35b856f17\" (UID: \"5d7425d6-7136-4893-8476-cec35b856f17\") " Nov 24 08:54:38 crc kubenswrapper[4718]: I1124 08:54:38.169195 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-db-sync-config-data\") pod \"5d7425d6-7136-4893-8476-cec35b856f17\" (UID: \"5d7425d6-7136-4893-8476-cec35b856f17\") " Nov 24 08:54:38 crc kubenswrapper[4718]: I1124 08:54:38.176380 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5d7425d6-7136-4893-8476-cec35b856f17" (UID: "5d7425d6-7136-4893-8476-cec35b856f17"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:54:38 crc kubenswrapper[4718]: I1124 08:54:38.182263 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d7425d6-7136-4893-8476-cec35b856f17-kube-api-access-sz8tg" (OuterVolumeSpecName: "kube-api-access-sz8tg") pod "5d7425d6-7136-4893-8476-cec35b856f17" (UID: "5d7425d6-7136-4893-8476-cec35b856f17"). InnerVolumeSpecName "kube-api-access-sz8tg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:54:38 crc kubenswrapper[4718]: I1124 08:54:38.203487 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d7425d6-7136-4893-8476-cec35b856f17" (UID: "5d7425d6-7136-4893-8476-cec35b856f17"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:54:38 crc kubenswrapper[4718]: I1124 08:54:38.235152 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-config-data" (OuterVolumeSpecName: "config-data") pod "5d7425d6-7136-4893-8476-cec35b856f17" (UID: "5d7425d6-7136-4893-8476-cec35b856f17"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:54:38 crc kubenswrapper[4718]: I1124 08:54:38.271360 4718 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:38 crc kubenswrapper[4718]: I1124 08:54:38.271406 4718 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:38 crc kubenswrapper[4718]: I1124 08:54:38.271421 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sz8tg\" (UniqueName: \"kubernetes.io/projected/5d7425d6-7136-4893-8476-cec35b856f17-kube-api-access-sz8tg\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:38 crc kubenswrapper[4718]: I1124 08:54:38.271434 4718 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d7425d6-7136-4893-8476-cec35b856f17-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.615750 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-5zrsx" event={"ID":"5d7425d6-7136-4893-8476-cec35b856f17","Type":"ContainerDied","Data":"6887a13ac71071cdd470aafc989e0280eae19f970924a31084c770ab0255ae89"} Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.615807 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6887a13ac71071cdd470aafc989e0280eae19f970924a31084c770ab0255ae89" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.615883 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-5zrsx" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.717787 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:54:39 crc kubenswrapper[4718]: E1124 08:54:39.718179 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d7425d6-7136-4893-8476-cec35b856f17" containerName="glance-db-sync" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.718195 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d7425d6-7136-4893-8476-cec35b856f17" containerName="glance-db-sync" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.718337 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d7425d6-7136-4893-8476-cec35b856f17" containerName="glance-db-sync" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.719143 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.723271 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-scripts" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.726016 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"cert-glance-default-public-svc" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.726126 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"combined-ca-bundle" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.726211 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-zvq88" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.726256 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"cert-glance-default-internal-svc" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.727128 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-single-config-data" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.742940 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.888102 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f446bd33-b08a-4730-8ccc-c026cbc03ce9-logs\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.888161 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.888179 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-scripts\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.888203 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f446bd33-b08a-4730-8ccc-c026cbc03ce9-httpd-run\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.888219 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t57t5\" (UniqueName: \"kubernetes.io/projected/f446bd33-b08a-4730-8ccc-c026cbc03ce9-kube-api-access-t57t5\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.888281 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.888311 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.888336 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-public-tls-certs\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.888355 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-config-data\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.989451 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-public-tls-certs\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.989520 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-config-data\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.989562 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f446bd33-b08a-4730-8ccc-c026cbc03ce9-logs\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.989598 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.989614 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-scripts\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.989639 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f446bd33-b08a-4730-8ccc-c026cbc03ce9-httpd-run\") pod 
\"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.989689 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t57t5\" (UniqueName: \"kubernetes.io/projected/f446bd33-b08a-4730-8ccc-c026cbc03ce9-kube-api-access-t57t5\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.989777 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.989796 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.990398 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") device mount path \"/mnt/openstack/pv09\"" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.990707 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f446bd33-b08a-4730-8ccc-c026cbc03ce9-logs\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.991145 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f446bd33-b08a-4730-8ccc-c026cbc03ce9-httpd-run\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.996069 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:39 crc kubenswrapper[4718]: I1124 08:54:39.996620 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:40 crc kubenswrapper[4718]: I1124 08:54:40.000679 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-scripts\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " 
pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:40 crc kubenswrapper[4718]: I1124 08:54:40.003098 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-config-data\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:40 crc kubenswrapper[4718]: I1124 08:54:40.007721 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-public-tls-certs\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:40 crc kubenswrapper[4718]: I1124 08:54:40.008192 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t57t5\" (UniqueName: \"kubernetes.io/projected/f446bd33-b08a-4730-8ccc-c026cbc03ce9-kube-api-access-t57t5\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:40 crc kubenswrapper[4718]: I1124 08:54:40.017935 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-single-0\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:40 crc kubenswrapper[4718]: I1124 08:54:40.040823 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:40 crc kubenswrapper[4718]: I1124 08:54:40.342226 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:54:40 crc kubenswrapper[4718]: I1124 08:54:40.625342 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"f446bd33-b08a-4730-8ccc-c026cbc03ce9","Type":"ContainerStarted","Data":"1b77cdf00736104f7bcfaec7f9ad0881b4c3270eb9879c92d9e6d5a224524ddf"} Nov 24 08:54:40 crc kubenswrapper[4718]: I1124 08:54:40.801581 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:54:41 crc kubenswrapper[4718]: I1124 08:54:41.636188 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"f446bd33-b08a-4730-8ccc-c026cbc03ce9","Type":"ContainerStarted","Data":"265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12"} Nov 24 08:54:41 crc kubenswrapper[4718]: I1124 08:54:41.636485 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"f446bd33-b08a-4730-8ccc-c026cbc03ce9","Type":"ContainerStarted","Data":"33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855"} Nov 24 08:54:41 crc kubenswrapper[4718]: I1124 08:54:41.636331 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="f446bd33-b08a-4730-8ccc-c026cbc03ce9" containerName="glance-log" containerID="cri-o://33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855" gracePeriod=30 Nov 24 08:54:41 crc kubenswrapper[4718]: I1124 08:54:41.636569 4718 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="glance-kuttl-tests/glance-default-single-0" podUID="f446bd33-b08a-4730-8ccc-c026cbc03ce9" containerName="glance-httpd" containerID="cri-o://265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12" gracePeriod=30 Nov 24 08:54:41 crc kubenswrapper[4718]: I1124 08:54:41.667605 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-single-0" podStartSLOduration=2.667585174 podStartE2EDuration="2.667585174s" podCreationTimestamp="2025-11-24 08:54:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:54:41.659552826 +0000 UTC m=+1153.775843730" watchObservedRunningTime="2025-11-24 08:54:41.667585174 +0000 UTC m=+1153.783876078" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.131315 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.322962 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t57t5\" (UniqueName: \"kubernetes.io/projected/f446bd33-b08a-4730-8ccc-c026cbc03ce9-kube-api-access-t57t5\") pod \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.323471 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f446bd33-b08a-4730-8ccc-c026cbc03ce9-logs\") pod \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.323543 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-scripts\") pod \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.323575 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-combined-ca-bundle\") pod \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.323615 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.323636 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-internal-tls-certs\") pod \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.323678 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-config-data\") pod \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.323694 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-public-tls-certs\") pod \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.323718 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f446bd33-b08a-4730-8ccc-c026cbc03ce9-httpd-run\") pod \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\" (UID: \"f446bd33-b08a-4730-8ccc-c026cbc03ce9\") " Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.323822 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f446bd33-b08a-4730-8ccc-c026cbc03ce9-logs" (OuterVolumeSpecName: "logs") pod "f446bd33-b08a-4730-8ccc-c026cbc03ce9" (UID: "f446bd33-b08a-4730-8ccc-c026cbc03ce9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.324024 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f446bd33-b08a-4730-8ccc-c026cbc03ce9-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f446bd33-b08a-4730-8ccc-c026cbc03ce9" (UID: "f446bd33-b08a-4730-8ccc-c026cbc03ce9"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.324231 4718 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f446bd33-b08a-4730-8ccc-c026cbc03ce9-logs\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.324254 4718 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f446bd33-b08a-4730-8ccc-c026cbc03ce9-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.328170 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f446bd33-b08a-4730-8ccc-c026cbc03ce9-kube-api-access-t57t5" (OuterVolumeSpecName: "kube-api-access-t57t5") pod "f446bd33-b08a-4730-8ccc-c026cbc03ce9" (UID: "f446bd33-b08a-4730-8ccc-c026cbc03ce9"). InnerVolumeSpecName "kube-api-access-t57t5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.332121 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "f446bd33-b08a-4730-8ccc-c026cbc03ce9" (UID: "f446bd33-b08a-4730-8ccc-c026cbc03ce9"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.332153 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-scripts" (OuterVolumeSpecName: "scripts") pod "f446bd33-b08a-4730-8ccc-c026cbc03ce9" (UID: "f446bd33-b08a-4730-8ccc-c026cbc03ce9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.343940 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f446bd33-b08a-4730-8ccc-c026cbc03ce9" (UID: "f446bd33-b08a-4730-8ccc-c026cbc03ce9"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.363338 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-config-data" (OuterVolumeSpecName: "config-data") pod "f446bd33-b08a-4730-8ccc-c026cbc03ce9" (UID: "f446bd33-b08a-4730-8ccc-c026cbc03ce9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.365447 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f446bd33-b08a-4730-8ccc-c026cbc03ce9" (UID: "f446bd33-b08a-4730-8ccc-c026cbc03ce9"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.368219 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f446bd33-b08a-4730-8ccc-c026cbc03ce9" (UID: "f446bd33-b08a-4730-8ccc-c026cbc03ce9"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.426019 4718 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.426079 4718 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.426124 4718 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.426160 4718 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.426171 4718 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.426179 4718 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f446bd33-b08a-4730-8ccc-c026cbc03ce9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.426189 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t57t5\" (UniqueName: \"kubernetes.io/projected/f446bd33-b08a-4730-8ccc-c026cbc03ce9-kube-api-access-t57t5\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.440167 4718 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 
08:54:42.527665 4718 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.648028 4718 generic.go:334] "Generic (PLEG): container finished" podID="f446bd33-b08a-4730-8ccc-c026cbc03ce9" containerID="265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12" exitCode=0 Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.648061 4718 generic.go:334] "Generic (PLEG): container finished" podID="f446bd33-b08a-4730-8ccc-c026cbc03ce9" containerID="33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855" exitCode=143 Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.648082 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"f446bd33-b08a-4730-8ccc-c026cbc03ce9","Type":"ContainerDied","Data":"265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12"} Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.648105 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.648133 4718 scope.go:117] "RemoveContainer" containerID="265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.648121 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"f446bd33-b08a-4730-8ccc-c026cbc03ce9","Type":"ContainerDied","Data":"33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855"} Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.648252 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"f446bd33-b08a-4730-8ccc-c026cbc03ce9","Type":"ContainerDied","Data":"1b77cdf00736104f7bcfaec7f9ad0881b4c3270eb9879c92d9e6d5a224524ddf"} Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.674170 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.679500 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.680195 4718 scope.go:117] "RemoveContainer" containerID="33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.699069 4718 scope.go:117] "RemoveContainer" containerID="265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12" Nov 24 08:54:42 crc kubenswrapper[4718]: E1124 08:54:42.700379 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12\": container with ID starting with 265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12 not found: ID does not exist" containerID="265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.700426 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12"} err="failed to get container status \"265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12\": rpc error: code = NotFound 
desc = could not find container \"265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12\": container with ID starting with 265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12 not found: ID does not exist" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.700448 4718 scope.go:117] "RemoveContainer" containerID="33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855" Nov 24 08:54:42 crc kubenswrapper[4718]: E1124 08:54:42.700693 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855\": container with ID starting with 33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855 not found: ID does not exist" containerID="33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.700713 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855"} err="failed to get container status \"33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855\": rpc error: code = NotFound desc = could not find container \"33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855\": container with ID starting with 33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855 not found: ID does not exist" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.700726 4718 scope.go:117] "RemoveContainer" containerID="265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.701069 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12"} err="failed to get container status \"265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12\": rpc error: code = NotFound desc = could not find container \"265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12\": container with ID starting with 265bec9f997b29178cf3b4e26654ab56c7394460db126f9a35dda2057dc08f12 not found: ID does not exist" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.701120 4718 scope.go:117] "RemoveContainer" containerID="33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.701825 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855"} err="failed to get container status \"33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855\": rpc error: code = NotFound desc = could not find container \"33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855\": container with ID starting with 33bc5ec6398e2e811feaca5df94eb8ec9a7ab000035ec9254c590705ad406855 not found: ID does not exist" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.702563 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:54:42 crc kubenswrapper[4718]: E1124 08:54:42.702806 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f446bd33-b08a-4730-8ccc-c026cbc03ce9" containerName="glance-httpd" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.702817 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="f446bd33-b08a-4730-8ccc-c026cbc03ce9" 
containerName="glance-httpd" Nov 24 08:54:42 crc kubenswrapper[4718]: E1124 08:54:42.702852 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f446bd33-b08a-4730-8ccc-c026cbc03ce9" containerName="glance-log" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.702858 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="f446bd33-b08a-4730-8ccc-c026cbc03ce9" containerName="glance-log" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.703035 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="f446bd33-b08a-4730-8ccc-c026cbc03ce9" containerName="glance-log" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.703050 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="f446bd33-b08a-4730-8ccc-c026cbc03ce9" containerName="glance-httpd" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.703779 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.706532 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-scripts" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.706709 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-zvq88" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.707013 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"cert-glance-default-internal-svc" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.707147 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-single-config-data" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.707267 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"combined-ca-bundle" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.707377 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"cert-glance-default-public-svc" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.720214 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.840875 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-config-data\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.840927 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-httpd-run\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.840972 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tvxx\" (UniqueName: \"kubernetes.io/projected/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-kube-api-access-7tvxx\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.841088 4718 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-logs\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.841179 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.841233 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-scripts\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.841308 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.841349 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-public-tls-certs\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.841442 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.943423 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-config-data\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.943462 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-httpd-run\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.943492 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tvxx\" (UniqueName: \"kubernetes.io/projected/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-kube-api-access-7tvxx\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.943513 4718 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-logs\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.943542 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.943564 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-scripts\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.943592 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.943613 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-public-tls-certs\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.943643 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.944665 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-logs\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.944700 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-httpd-run\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.945160 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") device mount path \"/mnt/openstack/pv09\"" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.949294 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-config-data\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.950497 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-scripts\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.951323 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.951682 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-public-tls-certs\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.956580 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.964266 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tvxx\" (UniqueName: \"kubernetes.io/projected/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-kube-api-access-7tvxx\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:42 crc kubenswrapper[4718]: I1124 08:54:42.968359 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-single-0\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:43 crc kubenswrapper[4718]: I1124 08:54:43.027225 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:43 crc kubenswrapper[4718]: I1124 08:54:43.420355 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:54:43 crc kubenswrapper[4718]: W1124 08:54:43.425602 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5fdf06a0_7061_4ce3_b758_0fc23cb37e94.slice/crio-38f92d0a958985c32c84e89fc8818fa4314e12032aed615a1c51051da194b9f9 WatchSource:0}: Error finding container 38f92d0a958985c32c84e89fc8818fa4314e12032aed615a1c51051da194b9f9: Status 404 returned error can't find the container with id 38f92d0a958985c32c84e89fc8818fa4314e12032aed615a1c51051da194b9f9 Nov 24 08:54:43 crc kubenswrapper[4718]: I1124 08:54:43.661270 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"5fdf06a0-7061-4ce3-b758-0fc23cb37e94","Type":"ContainerStarted","Data":"38f92d0a958985c32c84e89fc8818fa4314e12032aed615a1c51051da194b9f9"} Nov 24 08:54:44 crc kubenswrapper[4718]: I1124 08:54:44.605004 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f446bd33-b08a-4730-8ccc-c026cbc03ce9" path="/var/lib/kubelet/pods/f446bd33-b08a-4730-8ccc-c026cbc03ce9/volumes" Nov 24 08:54:44 crc kubenswrapper[4718]: I1124 08:54:44.672140 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"5fdf06a0-7061-4ce3-b758-0fc23cb37e94","Type":"ContainerStarted","Data":"96933d63fb18b048dec3fa5a04f1ed1270db3540e098bd36ed9903b95afec0ec"} Nov 24 08:54:44 crc kubenswrapper[4718]: I1124 08:54:44.672189 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"5fdf06a0-7061-4ce3-b758-0fc23cb37e94","Type":"ContainerStarted","Data":"63acd7c0216f28dc043b85c41e1a2d02711ef285eab49de6e0734c01f2141bdf"} Nov 24 08:54:44 crc kubenswrapper[4718]: I1124 08:54:44.691286 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-single-0" podStartSLOduration=2.691268832 podStartE2EDuration="2.691268832s" podCreationTimestamp="2025-11-24 08:54:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:54:44.688254947 +0000 UTC m=+1156.804545851" watchObservedRunningTime="2025-11-24 08:54:44.691268832 +0000 UTC m=+1156.807559736" Nov 24 08:54:52 crc kubenswrapper[4718]: I1124 08:54:52.044634 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:54:52 crc kubenswrapper[4718]: I1124 08:54:52.045242 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:54:52 crc kubenswrapper[4718]: I1124 08:54:52.045283 4718 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:54:52 crc 
kubenswrapper[4718]: I1124 08:54:52.045941 4718 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"aa05c9aab5bce7122c842c494e5738a78a924285d3cae2d6dd4b40d0c97d9b86"} pod="openshift-machine-config-operator/machine-config-daemon-575gl" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 08:54:52 crc kubenswrapper[4718]: I1124 08:54:52.046025 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" containerID="cri-o://aa05c9aab5bce7122c842c494e5738a78a924285d3cae2d6dd4b40d0c97d9b86" gracePeriod=600 Nov 24 08:54:52 crc kubenswrapper[4718]: I1124 08:54:52.742781 4718 generic.go:334] "Generic (PLEG): container finished" podID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerID="aa05c9aab5bce7122c842c494e5738a78a924285d3cae2d6dd4b40d0c97d9b86" exitCode=0 Nov 24 08:54:52 crc kubenswrapper[4718]: I1124 08:54:52.742846 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerDied","Data":"aa05c9aab5bce7122c842c494e5738a78a924285d3cae2d6dd4b40d0c97d9b86"} Nov 24 08:54:52 crc kubenswrapper[4718]: I1124 08:54:52.743143 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerStarted","Data":"05b60275224e52d49ea67f65feb883fbf9bd70bf35c0916e6ca86fad0a473f7d"} Nov 24 08:54:52 crc kubenswrapper[4718]: I1124 08:54:52.743169 4718 scope.go:117] "RemoveContainer" containerID="1a5f70c58a45eccf71c6de8475549daad92f17e19b44d32bf6a0b7edbca6ed9f" Nov 24 08:54:53 crc kubenswrapper[4718]: I1124 08:54:53.028373 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:53 crc kubenswrapper[4718]: I1124 08:54:53.028668 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:53 crc kubenswrapper[4718]: I1124 08:54:53.053673 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:53 crc kubenswrapper[4718]: I1124 08:54:53.067028 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:53 crc kubenswrapper[4718]: I1124 08:54:53.753929 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:53 crc kubenswrapper[4718]: I1124 08:54:53.754367 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:55 crc kubenswrapper[4718]: I1124 08:54:55.832230 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:55 crc kubenswrapper[4718]: I1124 08:54:55.832626 4718 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 08:54:55 crc kubenswrapper[4718]: I1124 08:54:55.834318 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:54:56 
crc kubenswrapper[4718]: I1124 08:54:56.770391 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-sync-5zrsx"] Nov 24 08:54:56 crc kubenswrapper[4718]: I1124 08:54:56.779150 4718 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="glance-kuttl-tests/glance-default-single-0" secret="" err="secret \"glance-glance-dockercfg-zvq88\" not found" Nov 24 08:54:56 crc kubenswrapper[4718]: I1124 08:54:56.779218 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-db-sync-5zrsx"] Nov 24 08:54:56 crc kubenswrapper[4718]: E1124 08:54:56.845273 4718 secret.go:188] Couldn't get secret glance-kuttl-tests/glance-scripts: secret "glance-scripts" not found Nov 24 08:54:56 crc kubenswrapper[4718]: E1124 08:54:56.845507 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-scripts podName:5fdf06a0-7061-4ce3-b758-0fc23cb37e94 nodeName:}" failed. No retries permitted until 2025-11-24 08:54:57.345489009 +0000 UTC m=+1169.461779913 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-scripts") pod "glance-default-single-0" (UID: "5fdf06a0-7061-4ce3-b758-0fc23cb37e94") : secret "glance-scripts" not found Nov 24 08:54:56 crc kubenswrapper[4718]: E1124 08:54:56.846285 4718 secret.go:188] Couldn't get secret glance-kuttl-tests/glance-default-single-config-data: secret "glance-default-single-config-data" not found Nov 24 08:54:56 crc kubenswrapper[4718]: E1124 08:54:56.846322 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-config-data podName:5fdf06a0-7061-4ce3-b758-0fc23cb37e94 nodeName:}" failed. No retries permitted until 2025-11-24 08:54:57.34631362 +0000 UTC m=+1169.462604524 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-config-data") pod "glance-default-single-0" (UID: "5fdf06a0-7061-4ce3-b758-0fc23cb37e94") : secret "glance-default-single-config-data" not found Nov 24 08:54:56 crc kubenswrapper[4718]: I1124 08:54:56.869052 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glancef30c-account-delete-t4bbm"] Nov 24 08:54:56 crc kubenswrapper[4718]: I1124 08:54:56.870173 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glancef30c-account-delete-t4bbm" Nov 24 08:54:56 crc kubenswrapper[4718]: I1124 08:54:56.875068 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glancef30c-account-delete-t4bbm"] Nov 24 08:54:56 crc kubenswrapper[4718]: I1124 08:54:56.880897 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:54:56 crc kubenswrapper[4718]: I1124 08:54:56.945012 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qb75\" (UniqueName: \"kubernetes.io/projected/742e6748-7d97-4848-a25d-8766acc31411-kube-api-access-5qb75\") pod \"glancef30c-account-delete-t4bbm\" (UID: \"742e6748-7d97-4848-a25d-8766acc31411\") " pod="glance-kuttl-tests/glancef30c-account-delete-t4bbm" Nov 24 08:54:56 crc kubenswrapper[4718]: I1124 08:54:56.945082 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/742e6748-7d97-4848-a25d-8766acc31411-operator-scripts\") pod \"glancef30c-account-delete-t4bbm\" (UID: \"742e6748-7d97-4848-a25d-8766acc31411\") " pod="glance-kuttl-tests/glancef30c-account-delete-t4bbm" Nov 24 08:54:57 crc kubenswrapper[4718]: I1124 08:54:57.046869 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qb75\" (UniqueName: \"kubernetes.io/projected/742e6748-7d97-4848-a25d-8766acc31411-kube-api-access-5qb75\") pod \"glancef30c-account-delete-t4bbm\" (UID: \"742e6748-7d97-4848-a25d-8766acc31411\") " pod="glance-kuttl-tests/glancef30c-account-delete-t4bbm" Nov 24 08:54:57 crc kubenswrapper[4718]: I1124 08:54:57.046948 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/742e6748-7d97-4848-a25d-8766acc31411-operator-scripts\") pod \"glancef30c-account-delete-t4bbm\" (UID: \"742e6748-7d97-4848-a25d-8766acc31411\") " pod="glance-kuttl-tests/glancef30c-account-delete-t4bbm" Nov 24 08:54:57 crc kubenswrapper[4718]: I1124 08:54:57.047999 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/742e6748-7d97-4848-a25d-8766acc31411-operator-scripts\") pod \"glancef30c-account-delete-t4bbm\" (UID: \"742e6748-7d97-4848-a25d-8766acc31411\") " pod="glance-kuttl-tests/glancef30c-account-delete-t4bbm" Nov 24 08:54:57 crc kubenswrapper[4718]: I1124 08:54:57.067546 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qb75\" (UniqueName: \"kubernetes.io/projected/742e6748-7d97-4848-a25d-8766acc31411-kube-api-access-5qb75\") pod \"glancef30c-account-delete-t4bbm\" (UID: \"742e6748-7d97-4848-a25d-8766acc31411\") " pod="glance-kuttl-tests/glancef30c-account-delete-t4bbm" Nov 24 08:54:57 crc kubenswrapper[4718]: I1124 08:54:57.192739 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glancef30c-account-delete-t4bbm" Nov 24 08:54:57 crc kubenswrapper[4718]: E1124 08:54:57.351725 4718 secret.go:188] Couldn't get secret glance-kuttl-tests/glance-scripts: secret "glance-scripts" not found Nov 24 08:54:57 crc kubenswrapper[4718]: E1124 08:54:57.352588 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-scripts podName:5fdf06a0-7061-4ce3-b758-0fc23cb37e94 nodeName:}" failed. 
No retries permitted until 2025-11-24 08:54:58.352566685 +0000 UTC m=+1170.468857589 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-scripts") pod "glance-default-single-0" (UID: "5fdf06a0-7061-4ce3-b758-0fc23cb37e94") : secret "glance-scripts" not found Nov 24 08:54:57 crc kubenswrapper[4718]: E1124 08:54:57.353012 4718 secret.go:188] Couldn't get secret glance-kuttl-tests/glance-default-single-config-data: secret "glance-default-single-config-data" not found Nov 24 08:54:57 crc kubenswrapper[4718]: E1124 08:54:57.353089 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-config-data podName:5fdf06a0-7061-4ce3-b758-0fc23cb37e94 nodeName:}" failed. No retries permitted until 2025-11-24 08:54:58.353071767 +0000 UTC m=+1170.469362671 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-config-data") pod "glance-default-single-0" (UID: "5fdf06a0-7061-4ce3-b758-0fc23cb37e94") : secret "glance-default-single-config-data" not found Nov 24 08:54:57 crc kubenswrapper[4718]: I1124 08:54:57.645024 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glancef30c-account-delete-t4bbm"] Nov 24 08:54:57 crc kubenswrapper[4718]: I1124 08:54:57.788784 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glancef30c-account-delete-t4bbm" event={"ID":"742e6748-7d97-4848-a25d-8766acc31411","Type":"ContainerStarted","Data":"ffff36b1bf6ac91de3ceda29fe16c0be3d9d6916ffa0c77f59b1e643adf990a8"} Nov 24 08:54:57 crc kubenswrapper[4718]: I1124 08:54:57.789060 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="5fdf06a0-7061-4ce3-b758-0fc23cb37e94" containerName="glance-log" containerID="cri-o://63acd7c0216f28dc043b85c41e1a2d02711ef285eab49de6e0734c01f2141bdf" gracePeriod=30 Nov 24 08:54:57 crc kubenswrapper[4718]: I1124 08:54:57.789206 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="5fdf06a0-7061-4ce3-b758-0fc23cb37e94" containerName="glance-httpd" containerID="cri-o://96933d63fb18b048dec3fa5a04f1ed1270db3540e098bd36ed9903b95afec0ec" gracePeriod=30 Nov 24 08:54:57 crc kubenswrapper[4718]: I1124 08:54:57.795209 4718 prober.go:107] "Probe failed" probeType="Readiness" pod="glance-kuttl-tests/glance-default-single-0" podUID="5fdf06a0-7061-4ce3-b758-0fc23cb37e94" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.112:9292/healthcheck\": EOF" Nov 24 08:54:58 crc kubenswrapper[4718]: E1124 08:54:58.368310 4718 secret.go:188] Couldn't get secret glance-kuttl-tests/glance-default-single-config-data: secret "glance-default-single-config-data" not found Nov 24 08:54:58 crc kubenswrapper[4718]: E1124 08:54:58.368437 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-config-data podName:5fdf06a0-7061-4ce3-b758-0fc23cb37e94 nodeName:}" failed. No retries permitted until 2025-11-24 08:55:00.368408977 +0000 UTC m=+1172.484699881 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-config-data") pod "glance-default-single-0" (UID: "5fdf06a0-7061-4ce3-b758-0fc23cb37e94") : secret "glance-default-single-config-data" not found Nov 24 08:54:58 crc kubenswrapper[4718]: E1124 08:54:58.368310 4718 secret.go:188] Couldn't get secret glance-kuttl-tests/glance-scripts: secret "glance-scripts" not found Nov 24 08:54:58 crc kubenswrapper[4718]: E1124 08:54:58.368755 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-scripts podName:5fdf06a0-7061-4ce3-b758-0fc23cb37e94 nodeName:}" failed. No retries permitted until 2025-11-24 08:55:00.368708784 +0000 UTC m=+1172.484999758 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-scripts") pod "glance-default-single-0" (UID: "5fdf06a0-7061-4ce3-b758-0fc23cb37e94") : secret "glance-scripts" not found Nov 24 08:54:58 crc kubenswrapper[4718]: I1124 08:54:58.604413 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d7425d6-7136-4893-8476-cec35b856f17" path="/var/lib/kubelet/pods/5d7425d6-7136-4893-8476-cec35b856f17/volumes" Nov 24 08:54:58 crc kubenswrapper[4718]: I1124 08:54:58.798467 4718 generic.go:334] "Generic (PLEG): container finished" podID="5fdf06a0-7061-4ce3-b758-0fc23cb37e94" containerID="63acd7c0216f28dc043b85c41e1a2d02711ef285eab49de6e0734c01f2141bdf" exitCode=143 Nov 24 08:54:58 crc kubenswrapper[4718]: I1124 08:54:58.798564 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"5fdf06a0-7061-4ce3-b758-0fc23cb37e94","Type":"ContainerDied","Data":"63acd7c0216f28dc043b85c41e1a2d02711ef285eab49de6e0734c01f2141bdf"} Nov 24 08:54:58 crc kubenswrapper[4718]: I1124 08:54:58.800063 4718 generic.go:334] "Generic (PLEG): container finished" podID="742e6748-7d97-4848-a25d-8766acc31411" containerID="c86ae82377eca1533d390bf3eee4a32ec3ae8368ae2a2cc492ebfd98063d4095" exitCode=0 Nov 24 08:54:58 crc kubenswrapper[4718]: I1124 08:54:58.800111 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glancef30c-account-delete-t4bbm" event={"ID":"742e6748-7d97-4848-a25d-8766acc31411","Type":"ContainerDied","Data":"c86ae82377eca1533d390bf3eee4a32ec3ae8368ae2a2cc492ebfd98063d4095"} Nov 24 08:55:00 crc kubenswrapper[4718]: I1124 08:55:00.067686 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glancef30c-account-delete-t4bbm" Nov 24 08:55:00 crc kubenswrapper[4718]: I1124 08:55:00.219670 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/742e6748-7d97-4848-a25d-8766acc31411-operator-scripts\") pod \"742e6748-7d97-4848-a25d-8766acc31411\" (UID: \"742e6748-7d97-4848-a25d-8766acc31411\") " Nov 24 08:55:00 crc kubenswrapper[4718]: I1124 08:55:00.219793 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qb75\" (UniqueName: \"kubernetes.io/projected/742e6748-7d97-4848-a25d-8766acc31411-kube-api-access-5qb75\") pod \"742e6748-7d97-4848-a25d-8766acc31411\" (UID: \"742e6748-7d97-4848-a25d-8766acc31411\") " Nov 24 08:55:00 crc kubenswrapper[4718]: I1124 08:55:00.220667 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/742e6748-7d97-4848-a25d-8766acc31411-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "742e6748-7d97-4848-a25d-8766acc31411" (UID: "742e6748-7d97-4848-a25d-8766acc31411"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:55:00 crc kubenswrapper[4718]: I1124 08:55:00.225169 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/742e6748-7d97-4848-a25d-8766acc31411-kube-api-access-5qb75" (OuterVolumeSpecName: "kube-api-access-5qb75") pod "742e6748-7d97-4848-a25d-8766acc31411" (UID: "742e6748-7d97-4848-a25d-8766acc31411"). InnerVolumeSpecName "kube-api-access-5qb75". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:55:00 crc kubenswrapper[4718]: I1124 08:55:00.321132 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qb75\" (UniqueName: \"kubernetes.io/projected/742e6748-7d97-4848-a25d-8766acc31411-kube-api-access-5qb75\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:00 crc kubenswrapper[4718]: I1124 08:55:00.321172 4718 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/742e6748-7d97-4848-a25d-8766acc31411-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:00 crc kubenswrapper[4718]: E1124 08:55:00.423057 4718 secret.go:188] Couldn't get secret glance-kuttl-tests/glance-scripts: secret "glance-scripts" not found Nov 24 08:55:00 crc kubenswrapper[4718]: E1124 08:55:00.423341 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-scripts podName:5fdf06a0-7061-4ce3-b758-0fc23cb37e94 nodeName:}" failed. No retries permitted until 2025-11-24 08:55:04.423323456 +0000 UTC m=+1176.539614360 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-scripts") pod "glance-default-single-0" (UID: "5fdf06a0-7061-4ce3-b758-0fc23cb37e94") : secret "glance-scripts" not found Nov 24 08:55:00 crc kubenswrapper[4718]: E1124 08:55:00.423057 4718 secret.go:188] Couldn't get secret glance-kuttl-tests/glance-default-single-config-data: secret "glance-default-single-config-data" not found Nov 24 08:55:00 crc kubenswrapper[4718]: E1124 08:55:00.423563 4718 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-config-data podName:5fdf06a0-7061-4ce3-b758-0fc23cb37e94 nodeName:}" failed. 
No retries permitted until 2025-11-24 08:55:04.423552121 +0000 UTC m=+1176.539843025 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-config-data") pod "glance-default-single-0" (UID: "5fdf06a0-7061-4ce3-b758-0fc23cb37e94") : secret "glance-default-single-config-data" not found Nov 24 08:55:00 crc kubenswrapper[4718]: I1124 08:55:00.815316 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glancef30c-account-delete-t4bbm" event={"ID":"742e6748-7d97-4848-a25d-8766acc31411","Type":"ContainerDied","Data":"ffff36b1bf6ac91de3ceda29fe16c0be3d9d6916ffa0c77f59b1e643adf990a8"} Nov 24 08:55:00 crc kubenswrapper[4718]: I1124 08:55:00.815375 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glancef30c-account-delete-t4bbm" Nov 24 08:55:00 crc kubenswrapper[4718]: I1124 08:55:00.815385 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ffff36b1bf6ac91de3ceda29fe16c0be3d9d6916ffa0c77f59b1e643adf990a8" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.538508 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.645205 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-combined-ca-bundle\") pod \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.645378 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-public-tls-certs\") pod \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.645415 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-internal-tls-certs\") pod \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.645453 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-scripts\") pod \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.645488 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-httpd-run\") pod \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.645565 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.645634 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-config-data\") pod \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.645677 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-logs\") pod \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.645739 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7tvxx\" (UniqueName: \"kubernetes.io/projected/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-kube-api-access-7tvxx\") pod \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\" (UID: \"5fdf06a0-7061-4ce3-b758-0fc23cb37e94\") " Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.646283 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5fdf06a0-7061-4ce3-b758-0fc23cb37e94" (UID: "5fdf06a0-7061-4ce3-b758-0fc23cb37e94"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.646432 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-logs" (OuterVolumeSpecName: "logs") pod "5fdf06a0-7061-4ce3-b758-0fc23cb37e94" (UID: "5fdf06a0-7061-4ce3-b758-0fc23cb37e94"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.652270 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-scripts" (OuterVolumeSpecName: "scripts") pod "5fdf06a0-7061-4ce3-b758-0fc23cb37e94" (UID: "5fdf06a0-7061-4ce3-b758-0fc23cb37e94"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.652427 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "5fdf06a0-7061-4ce3-b758-0fc23cb37e94" (UID: "5fdf06a0-7061-4ce3-b758-0fc23cb37e94"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.661671 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-kube-api-access-7tvxx" (OuterVolumeSpecName: "kube-api-access-7tvxx") pod "5fdf06a0-7061-4ce3-b758-0fc23cb37e94" (UID: "5fdf06a0-7061-4ce3-b758-0fc23cb37e94"). InnerVolumeSpecName "kube-api-access-7tvxx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.680548 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5fdf06a0-7061-4ce3-b758-0fc23cb37e94" (UID: "5fdf06a0-7061-4ce3-b758-0fc23cb37e94"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.689436 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5fdf06a0-7061-4ce3-b758-0fc23cb37e94" (UID: "5fdf06a0-7061-4ce3-b758-0fc23cb37e94"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.690130 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5fdf06a0-7061-4ce3-b758-0fc23cb37e94" (UID: "5fdf06a0-7061-4ce3-b758-0fc23cb37e94"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.690885 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-config-data" (OuterVolumeSpecName: "config-data") pod "5fdf06a0-7061-4ce3-b758-0fc23cb37e94" (UID: "5fdf06a0-7061-4ce3-b758-0fc23cb37e94"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.748105 4718 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.748158 4718 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-logs\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.748172 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7tvxx\" (UniqueName: \"kubernetes.io/projected/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-kube-api-access-7tvxx\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.748235 4718 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.748249 4718 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.749110 4718 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.749138 4718 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.749150 4718 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fdf06a0-7061-4ce3-b758-0fc23cb37e94-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.749179 4718 
reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.764576 4718 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.827476 4718 generic.go:334] "Generic (PLEG): container finished" podID="5fdf06a0-7061-4ce3-b758-0fc23cb37e94" containerID="96933d63fb18b048dec3fa5a04f1ed1270db3540e098bd36ed9903b95afec0ec" exitCode=0 Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.827558 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"5fdf06a0-7061-4ce3-b758-0fc23cb37e94","Type":"ContainerDied","Data":"96933d63fb18b048dec3fa5a04f1ed1270db3540e098bd36ed9903b95afec0ec"} Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.827637 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"5fdf06a0-7061-4ce3-b758-0fc23cb37e94","Type":"ContainerDied","Data":"38f92d0a958985c32c84e89fc8818fa4314e12032aed615a1c51051da194b9f9"} Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.827657 4718 scope.go:117] "RemoveContainer" containerID="96933d63fb18b048dec3fa5a04f1ed1270db3540e098bd36ed9903b95afec0ec" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.828166 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.852444 4718 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.887756 4718 scope.go:117] "RemoveContainer" containerID="63acd7c0216f28dc043b85c41e1a2d02711ef285eab49de6e0734c01f2141bdf" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.888085 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.897013 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-create-wvjhf"] Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.908149 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-db-create-wvjhf"] Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.913096 4718 scope.go:117] "RemoveContainer" containerID="96933d63fb18b048dec3fa5a04f1ed1270db3540e098bd36ed9903b95afec0ec" Nov 24 08:55:01 crc kubenswrapper[4718]: E1124 08:55:01.913687 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96933d63fb18b048dec3fa5a04f1ed1270db3540e098bd36ed9903b95afec0ec\": container with ID starting with 96933d63fb18b048dec3fa5a04f1ed1270db3540e098bd36ed9903b95afec0ec not found: ID does not exist" containerID="96933d63fb18b048dec3fa5a04f1ed1270db3540e098bd36ed9903b95afec0ec" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.913716 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96933d63fb18b048dec3fa5a04f1ed1270db3540e098bd36ed9903b95afec0ec"} err="failed to get container status 
\"96933d63fb18b048dec3fa5a04f1ed1270db3540e098bd36ed9903b95afec0ec\": rpc error: code = NotFound desc = could not find container \"96933d63fb18b048dec3fa5a04f1ed1270db3540e098bd36ed9903b95afec0ec\": container with ID starting with 96933d63fb18b048dec3fa5a04f1ed1270db3540e098bd36ed9903b95afec0ec not found: ID does not exist" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.913736 4718 scope.go:117] "RemoveContainer" containerID="63acd7c0216f28dc043b85c41e1a2d02711ef285eab49de6e0734c01f2141bdf" Nov 24 08:55:01 crc kubenswrapper[4718]: E1124 08:55:01.914132 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63acd7c0216f28dc043b85c41e1a2d02711ef285eab49de6e0734c01f2141bdf\": container with ID starting with 63acd7c0216f28dc043b85c41e1a2d02711ef285eab49de6e0734c01f2141bdf not found: ID does not exist" containerID="63acd7c0216f28dc043b85c41e1a2d02711ef285eab49de6e0734c01f2141bdf" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.914156 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63acd7c0216f28dc043b85c41e1a2d02711ef285eab49de6e0734c01f2141bdf"} err="failed to get container status \"63acd7c0216f28dc043b85c41e1a2d02711ef285eab49de6e0734c01f2141bdf\": rpc error: code = NotFound desc = could not find container \"63acd7c0216f28dc043b85c41e1a2d02711ef285eab49de6e0734c01f2141bdf\": container with ID starting with 63acd7c0216f28dc043b85c41e1a2d02711ef285eab49de6e0734c01f2141bdf not found: ID does not exist" Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.916592 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.922698 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-f30c-account-create-update-dh9lq"] Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.928434 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glancef30c-account-delete-t4bbm"] Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.933187 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-f30c-account-create-update-dh9lq"] Nov 24 08:55:01 crc kubenswrapper[4718]: I1124 08:55:01.937442 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glancef30c-account-delete-t4bbm"] Nov 24 08:55:02 crc kubenswrapper[4718]: I1124 08:55:02.607808 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d098245-5f65-4b2d-a00a-6c3cc6c954e4" path="/var/lib/kubelet/pods/2d098245-5f65-4b2d-a00a-6c3cc6c954e4/volumes" Nov 24 08:55:02 crc kubenswrapper[4718]: I1124 08:55:02.609490 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fdf06a0-7061-4ce3-b758-0fc23cb37e94" path="/var/lib/kubelet/pods/5fdf06a0-7061-4ce3-b758-0fc23cb37e94/volumes" Nov 24 08:55:02 crc kubenswrapper[4718]: I1124 08:55:02.610148 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="742e6748-7d97-4848-a25d-8766acc31411" path="/var/lib/kubelet/pods/742e6748-7d97-4848-a25d-8766acc31411/volumes" Nov 24 08:55:02 crc kubenswrapper[4718]: I1124 08:55:02.611887 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5" path="/var/lib/kubelet/pods/fdefcc19-09ed-4f2e-a8a5-5ac3d5ed5dd5/volumes" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.477166 4718 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["glance-kuttl-tests/glance-7b23-account-create-update-s5bmh"] Nov 24 08:55:03 crc kubenswrapper[4718]: E1124 08:55:03.478000 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fdf06a0-7061-4ce3-b758-0fc23cb37e94" containerName="glance-log" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.478022 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fdf06a0-7061-4ce3-b758-0fc23cb37e94" containerName="glance-log" Nov 24 08:55:03 crc kubenswrapper[4718]: E1124 08:55:03.478058 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="742e6748-7d97-4848-a25d-8766acc31411" containerName="mariadb-account-delete" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.478066 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="742e6748-7d97-4848-a25d-8766acc31411" containerName="mariadb-account-delete" Nov 24 08:55:03 crc kubenswrapper[4718]: E1124 08:55:03.478080 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fdf06a0-7061-4ce3-b758-0fc23cb37e94" containerName="glance-httpd" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.478088 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fdf06a0-7061-4ce3-b758-0fc23cb37e94" containerName="glance-httpd" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.478235 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fdf06a0-7061-4ce3-b758-0fc23cb37e94" containerName="glance-httpd" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.478260 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fdf06a0-7061-4ce3-b758-0fc23cb37e94" containerName="glance-log" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.478274 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="742e6748-7d97-4848-a25d-8766acc31411" containerName="mariadb-account-delete" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.478810 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-7b23-account-create-update-s5bmh" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.481304 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-create-b6tpd"] Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.482144 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-create-b6tpd" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.485006 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-db-secret" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.497166 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-7b23-account-create-update-s5bmh"] Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.502863 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-b6tpd"] Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.580433 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2bd0da2-6ed3-47c4-9984-3c48e00fddad-operator-scripts\") pod \"glance-db-create-b6tpd\" (UID: \"a2bd0da2-6ed3-47c4-9984-3c48e00fddad\") " pod="glance-kuttl-tests/glance-db-create-b6tpd" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.580490 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwzxk\" (UniqueName: \"kubernetes.io/projected/798e367e-b020-428b-a39d-36fd0f8a0082-kube-api-access-kwzxk\") pod \"glance-7b23-account-create-update-s5bmh\" (UID: \"798e367e-b020-428b-a39d-36fd0f8a0082\") " pod="glance-kuttl-tests/glance-7b23-account-create-update-s5bmh" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.580526 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbx5b\" (UniqueName: \"kubernetes.io/projected/a2bd0da2-6ed3-47c4-9984-3c48e00fddad-kube-api-access-cbx5b\") pod \"glance-db-create-b6tpd\" (UID: \"a2bd0da2-6ed3-47c4-9984-3c48e00fddad\") " pod="glance-kuttl-tests/glance-db-create-b6tpd" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.580587 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/798e367e-b020-428b-a39d-36fd0f8a0082-operator-scripts\") pod \"glance-7b23-account-create-update-s5bmh\" (UID: \"798e367e-b020-428b-a39d-36fd0f8a0082\") " pod="glance-kuttl-tests/glance-7b23-account-create-update-s5bmh" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.682383 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2bd0da2-6ed3-47c4-9984-3c48e00fddad-operator-scripts\") pod \"glance-db-create-b6tpd\" (UID: \"a2bd0da2-6ed3-47c4-9984-3c48e00fddad\") " pod="glance-kuttl-tests/glance-db-create-b6tpd" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.682469 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwzxk\" (UniqueName: \"kubernetes.io/projected/798e367e-b020-428b-a39d-36fd0f8a0082-kube-api-access-kwzxk\") pod \"glance-7b23-account-create-update-s5bmh\" (UID: \"798e367e-b020-428b-a39d-36fd0f8a0082\") " pod="glance-kuttl-tests/glance-7b23-account-create-update-s5bmh" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.682526 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbx5b\" (UniqueName: \"kubernetes.io/projected/a2bd0da2-6ed3-47c4-9984-3c48e00fddad-kube-api-access-cbx5b\") pod \"glance-db-create-b6tpd\" (UID: \"a2bd0da2-6ed3-47c4-9984-3c48e00fddad\") " pod="glance-kuttl-tests/glance-db-create-b6tpd" Nov 24 
08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.682580 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/798e367e-b020-428b-a39d-36fd0f8a0082-operator-scripts\") pod \"glance-7b23-account-create-update-s5bmh\" (UID: \"798e367e-b020-428b-a39d-36fd0f8a0082\") " pod="glance-kuttl-tests/glance-7b23-account-create-update-s5bmh" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.683833 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/798e367e-b020-428b-a39d-36fd0f8a0082-operator-scripts\") pod \"glance-7b23-account-create-update-s5bmh\" (UID: \"798e367e-b020-428b-a39d-36fd0f8a0082\") " pod="glance-kuttl-tests/glance-7b23-account-create-update-s5bmh" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.684029 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2bd0da2-6ed3-47c4-9984-3c48e00fddad-operator-scripts\") pod \"glance-db-create-b6tpd\" (UID: \"a2bd0da2-6ed3-47c4-9984-3c48e00fddad\") " pod="glance-kuttl-tests/glance-db-create-b6tpd" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.701574 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwzxk\" (UniqueName: \"kubernetes.io/projected/798e367e-b020-428b-a39d-36fd0f8a0082-kube-api-access-kwzxk\") pod \"glance-7b23-account-create-update-s5bmh\" (UID: \"798e367e-b020-428b-a39d-36fd0f8a0082\") " pod="glance-kuttl-tests/glance-7b23-account-create-update-s5bmh" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.702280 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbx5b\" (UniqueName: \"kubernetes.io/projected/a2bd0da2-6ed3-47c4-9984-3c48e00fddad-kube-api-access-cbx5b\") pod \"glance-db-create-b6tpd\" (UID: \"a2bd0da2-6ed3-47c4-9984-3c48e00fddad\") " pod="glance-kuttl-tests/glance-db-create-b6tpd" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.798361 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-7b23-account-create-update-s5bmh" Nov 24 08:55:03 crc kubenswrapper[4718]: I1124 08:55:03.807956 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-create-b6tpd" Nov 24 08:55:04 crc kubenswrapper[4718]: I1124 08:55:04.214127 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-7b23-account-create-update-s5bmh"] Nov 24 08:55:04 crc kubenswrapper[4718]: W1124 08:55:04.228186 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod798e367e_b020_428b_a39d_36fd0f8a0082.slice/crio-c8f05658bf056ef0a4476a2aa862c7effd54e549186462aa8253728ba8d2f0fb WatchSource:0}: Error finding container c8f05658bf056ef0a4476a2aa862c7effd54e549186462aa8253728ba8d2f0fb: Status 404 returned error can't find the container with id c8f05658bf056ef0a4476a2aa862c7effd54e549186462aa8253728ba8d2f0fb Nov 24 08:55:04 crc kubenswrapper[4718]: I1124 08:55:04.275821 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-b6tpd"] Nov 24 08:55:04 crc kubenswrapper[4718]: W1124 08:55:04.295606 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2bd0da2_6ed3_47c4_9984_3c48e00fddad.slice/crio-7d020e9b8b4259868dc1a0402b48f6211e1cda1b147fb75b54ed6de87c850c79 WatchSource:0}: Error finding container 7d020e9b8b4259868dc1a0402b48f6211e1cda1b147fb75b54ed6de87c850c79: Status 404 returned error can't find the container with id 7d020e9b8b4259868dc1a0402b48f6211e1cda1b147fb75b54ed6de87c850c79 Nov 24 08:55:04 crc kubenswrapper[4718]: I1124 08:55:04.850638 4718 generic.go:334] "Generic (PLEG): container finished" podID="a2bd0da2-6ed3-47c4-9984-3c48e00fddad" containerID="868519ed7bb67d46e1a25d8a297da18e6b2d10aa894885340958e83f48e16203" exitCode=0 Nov 24 08:55:04 crc kubenswrapper[4718]: I1124 08:55:04.850683 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-b6tpd" event={"ID":"a2bd0da2-6ed3-47c4-9984-3c48e00fddad","Type":"ContainerDied","Data":"868519ed7bb67d46e1a25d8a297da18e6b2d10aa894885340958e83f48e16203"} Nov 24 08:55:04 crc kubenswrapper[4718]: I1124 08:55:04.851056 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-b6tpd" event={"ID":"a2bd0da2-6ed3-47c4-9984-3c48e00fddad","Type":"ContainerStarted","Data":"7d020e9b8b4259868dc1a0402b48f6211e1cda1b147fb75b54ed6de87c850c79"} Nov 24 08:55:04 crc kubenswrapper[4718]: I1124 08:55:04.854347 4718 generic.go:334] "Generic (PLEG): container finished" podID="798e367e-b020-428b-a39d-36fd0f8a0082" containerID="b5406ddc0b50a9931338235e6278d32488abdfb5b667b841598eb1bcb866ee86" exitCode=0 Nov 24 08:55:04 crc kubenswrapper[4718]: I1124 08:55:04.854397 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-7b23-account-create-update-s5bmh" event={"ID":"798e367e-b020-428b-a39d-36fd0f8a0082","Type":"ContainerDied","Data":"b5406ddc0b50a9931338235e6278d32488abdfb5b667b841598eb1bcb866ee86"} Nov 24 08:55:04 crc kubenswrapper[4718]: I1124 08:55:04.854428 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-7b23-account-create-update-s5bmh" event={"ID":"798e367e-b020-428b-a39d-36fd0f8a0082","Type":"ContainerStarted","Data":"c8f05658bf056ef0a4476a2aa862c7effd54e549186462aa8253728ba8d2f0fb"} Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.178475 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-create-b6tpd" Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.184271 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-7b23-account-create-update-s5bmh" Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.322053 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2bd0da2-6ed3-47c4-9984-3c48e00fddad-operator-scripts\") pod \"a2bd0da2-6ed3-47c4-9984-3c48e00fddad\" (UID: \"a2bd0da2-6ed3-47c4-9984-3c48e00fddad\") " Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.322111 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwzxk\" (UniqueName: \"kubernetes.io/projected/798e367e-b020-428b-a39d-36fd0f8a0082-kube-api-access-kwzxk\") pod \"798e367e-b020-428b-a39d-36fd0f8a0082\" (UID: \"798e367e-b020-428b-a39d-36fd0f8a0082\") " Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.322173 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbx5b\" (UniqueName: \"kubernetes.io/projected/a2bd0da2-6ed3-47c4-9984-3c48e00fddad-kube-api-access-cbx5b\") pod \"a2bd0da2-6ed3-47c4-9984-3c48e00fddad\" (UID: \"a2bd0da2-6ed3-47c4-9984-3c48e00fddad\") " Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.322229 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/798e367e-b020-428b-a39d-36fd0f8a0082-operator-scripts\") pod \"798e367e-b020-428b-a39d-36fd0f8a0082\" (UID: \"798e367e-b020-428b-a39d-36fd0f8a0082\") " Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.322938 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/798e367e-b020-428b-a39d-36fd0f8a0082-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "798e367e-b020-428b-a39d-36fd0f8a0082" (UID: "798e367e-b020-428b-a39d-36fd0f8a0082"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.322964 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2bd0da2-6ed3-47c4-9984-3c48e00fddad-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a2bd0da2-6ed3-47c4-9984-3c48e00fddad" (UID: "a2bd0da2-6ed3-47c4-9984-3c48e00fddad"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.327728 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/798e367e-b020-428b-a39d-36fd0f8a0082-kube-api-access-kwzxk" (OuterVolumeSpecName: "kube-api-access-kwzxk") pod "798e367e-b020-428b-a39d-36fd0f8a0082" (UID: "798e367e-b020-428b-a39d-36fd0f8a0082"). InnerVolumeSpecName "kube-api-access-kwzxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.327821 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2bd0da2-6ed3-47c4-9984-3c48e00fddad-kube-api-access-cbx5b" (OuterVolumeSpecName: "kube-api-access-cbx5b") pod "a2bd0da2-6ed3-47c4-9984-3c48e00fddad" (UID: "a2bd0da2-6ed3-47c4-9984-3c48e00fddad"). InnerVolumeSpecName "kube-api-access-cbx5b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.424107 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbx5b\" (UniqueName: \"kubernetes.io/projected/a2bd0da2-6ed3-47c4-9984-3c48e00fddad-kube-api-access-cbx5b\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.424145 4718 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/798e367e-b020-428b-a39d-36fd0f8a0082-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.424154 4718 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2bd0da2-6ed3-47c4-9984-3c48e00fddad-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.424164 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwzxk\" (UniqueName: \"kubernetes.io/projected/798e367e-b020-428b-a39d-36fd0f8a0082-kube-api-access-kwzxk\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.874753 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-b6tpd" event={"ID":"a2bd0da2-6ed3-47c4-9984-3c48e00fddad","Type":"ContainerDied","Data":"7d020e9b8b4259868dc1a0402b48f6211e1cda1b147fb75b54ed6de87c850c79"} Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.874831 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d020e9b8b4259868dc1a0402b48f6211e1cda1b147fb75b54ed6de87c850c79" Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.874822 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-b6tpd" Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.878676 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-7b23-account-create-update-s5bmh" event={"ID":"798e367e-b020-428b-a39d-36fd0f8a0082","Type":"ContainerDied","Data":"c8f05658bf056ef0a4476a2aa862c7effd54e549186462aa8253728ba8d2f0fb"} Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.878727 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8f05658bf056ef0a4476a2aa862c7effd54e549186462aa8253728ba8d2f0fb" Nov 24 08:55:06 crc kubenswrapper[4718]: I1124 08:55:06.878801 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-7b23-account-create-update-s5bmh" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.588239 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-sync-xs54k"] Nov 24 08:55:08 crc kubenswrapper[4718]: E1124 08:55:08.588774 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="798e367e-b020-428b-a39d-36fd0f8a0082" containerName="mariadb-account-create-update" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.588788 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="798e367e-b020-428b-a39d-36fd0f8a0082" containerName="mariadb-account-create-update" Nov 24 08:55:08 crc kubenswrapper[4718]: E1124 08:55:08.588802 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2bd0da2-6ed3-47c4-9984-3c48e00fddad" containerName="mariadb-database-create" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.588808 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2bd0da2-6ed3-47c4-9984-3c48e00fddad" containerName="mariadb-database-create" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.588954 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2bd0da2-6ed3-47c4-9984-3c48e00fddad" containerName="mariadb-database-create" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.588985 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="798e367e-b020-428b-a39d-36fd0f8a0082" containerName="mariadb-account-create-update" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.589434 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-xs54k" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.596081 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-sz2tc" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.596811 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-config-data" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.606420 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-xs54k"] Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.759410 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-config-data\") pod \"glance-db-sync-xs54k\" (UID: \"94a221c9-1794-4a91-b03a-c42a9bdcfcb9\") " pod="glance-kuttl-tests/glance-db-sync-xs54k" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.759469 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-db-sync-config-data\") pod \"glance-db-sync-xs54k\" (UID: \"94a221c9-1794-4a91-b03a-c42a9bdcfcb9\") " pod="glance-kuttl-tests/glance-db-sync-xs54k" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.759680 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9drg5\" (UniqueName: \"kubernetes.io/projected/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-kube-api-access-9drg5\") pod \"glance-db-sync-xs54k\" (UID: \"94a221c9-1794-4a91-b03a-c42a9bdcfcb9\") " pod="glance-kuttl-tests/glance-db-sync-xs54k" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.861204 4718 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-config-data\") pod \"glance-db-sync-xs54k\" (UID: \"94a221c9-1794-4a91-b03a-c42a9bdcfcb9\") " pod="glance-kuttl-tests/glance-db-sync-xs54k" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.861259 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-db-sync-config-data\") pod \"glance-db-sync-xs54k\" (UID: \"94a221c9-1794-4a91-b03a-c42a9bdcfcb9\") " pod="glance-kuttl-tests/glance-db-sync-xs54k" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.861303 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9drg5\" (UniqueName: \"kubernetes.io/projected/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-kube-api-access-9drg5\") pod \"glance-db-sync-xs54k\" (UID: \"94a221c9-1794-4a91-b03a-c42a9bdcfcb9\") " pod="glance-kuttl-tests/glance-db-sync-xs54k" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.875907 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-db-sync-config-data\") pod \"glance-db-sync-xs54k\" (UID: \"94a221c9-1794-4a91-b03a-c42a9bdcfcb9\") " pod="glance-kuttl-tests/glance-db-sync-xs54k" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.875960 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-config-data\") pod \"glance-db-sync-xs54k\" (UID: \"94a221c9-1794-4a91-b03a-c42a9bdcfcb9\") " pod="glance-kuttl-tests/glance-db-sync-xs54k" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.880536 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9drg5\" (UniqueName: \"kubernetes.io/projected/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-kube-api-access-9drg5\") pod \"glance-db-sync-xs54k\" (UID: \"94a221c9-1794-4a91-b03a-c42a9bdcfcb9\") " pod="glance-kuttl-tests/glance-db-sync-xs54k" Nov 24 08:55:08 crc kubenswrapper[4718]: I1124 08:55:08.907865 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-xs54k" Nov 24 08:55:09 crc kubenswrapper[4718]: I1124 08:55:09.336674 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-xs54k"] Nov 24 08:55:09 crc kubenswrapper[4718]: W1124 08:55:09.345900 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94a221c9_1794_4a91_b03a_c42a9bdcfcb9.slice/crio-af1ce801dbd4e636ee2c19a06b3b19b7bec8292e600cb263f6b240a02d32548f WatchSource:0}: Error finding container af1ce801dbd4e636ee2c19a06b3b19b7bec8292e600cb263f6b240a02d32548f: Status 404 returned error can't find the container with id af1ce801dbd4e636ee2c19a06b3b19b7bec8292e600cb263f6b240a02d32548f Nov 24 08:55:09 crc kubenswrapper[4718]: I1124 08:55:09.906728 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-xs54k" event={"ID":"94a221c9-1794-4a91-b03a-c42a9bdcfcb9","Type":"ContainerStarted","Data":"2753676eb625477baf9352aa9145b9a630e77d1cfbd2ad80d3b6949a2f7b8a1f"} Nov 24 08:55:09 crc kubenswrapper[4718]: I1124 08:55:09.907078 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-xs54k" event={"ID":"94a221c9-1794-4a91-b03a-c42a9bdcfcb9","Type":"ContainerStarted","Data":"af1ce801dbd4e636ee2c19a06b3b19b7bec8292e600cb263f6b240a02d32548f"} Nov 24 08:55:09 crc kubenswrapper[4718]: I1124 08:55:09.927155 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-db-sync-xs54k" podStartSLOduration=1.927134004 podStartE2EDuration="1.927134004s" podCreationTimestamp="2025-11-24 08:55:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:55:09.922552551 +0000 UTC m=+1182.038843455" watchObservedRunningTime="2025-11-24 08:55:09.927134004 +0000 UTC m=+1182.043424908" Nov 24 08:55:12 crc kubenswrapper[4718]: I1124 08:55:12.934135 4718 generic.go:334] "Generic (PLEG): container finished" podID="94a221c9-1794-4a91-b03a-c42a9bdcfcb9" containerID="2753676eb625477baf9352aa9145b9a630e77d1cfbd2ad80d3b6949a2f7b8a1f" exitCode=0 Nov 24 08:55:12 crc kubenswrapper[4718]: I1124 08:55:12.934314 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-xs54k" event={"ID":"94a221c9-1794-4a91-b03a-c42a9bdcfcb9","Type":"ContainerDied","Data":"2753676eb625477baf9352aa9145b9a630e77d1cfbd2ad80d3b6949a2f7b8a1f"} Nov 24 08:55:14 crc kubenswrapper[4718]: I1124 08:55:14.242587 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-xs54k" Nov 24 08:55:14 crc kubenswrapper[4718]: I1124 08:55:14.347357 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9drg5\" (UniqueName: \"kubernetes.io/projected/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-kube-api-access-9drg5\") pod \"94a221c9-1794-4a91-b03a-c42a9bdcfcb9\" (UID: \"94a221c9-1794-4a91-b03a-c42a9bdcfcb9\") " Nov 24 08:55:14 crc kubenswrapper[4718]: I1124 08:55:14.347749 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-config-data\") pod \"94a221c9-1794-4a91-b03a-c42a9bdcfcb9\" (UID: \"94a221c9-1794-4a91-b03a-c42a9bdcfcb9\") " Nov 24 08:55:14 crc kubenswrapper[4718]: I1124 08:55:14.347824 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-db-sync-config-data\") pod \"94a221c9-1794-4a91-b03a-c42a9bdcfcb9\" (UID: \"94a221c9-1794-4a91-b03a-c42a9bdcfcb9\") " Nov 24 08:55:14 crc kubenswrapper[4718]: I1124 08:55:14.352403 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-kube-api-access-9drg5" (OuterVolumeSpecName: "kube-api-access-9drg5") pod "94a221c9-1794-4a91-b03a-c42a9bdcfcb9" (UID: "94a221c9-1794-4a91-b03a-c42a9bdcfcb9"). InnerVolumeSpecName "kube-api-access-9drg5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:55:14 crc kubenswrapper[4718]: I1124 08:55:14.352992 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "94a221c9-1794-4a91-b03a-c42a9bdcfcb9" (UID: "94a221c9-1794-4a91-b03a-c42a9bdcfcb9"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:55:14 crc kubenswrapper[4718]: I1124 08:55:14.383133 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-config-data" (OuterVolumeSpecName: "config-data") pod "94a221c9-1794-4a91-b03a-c42a9bdcfcb9" (UID: "94a221c9-1794-4a91-b03a-c42a9bdcfcb9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:55:14 crc kubenswrapper[4718]: I1124 08:55:14.449548 4718 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:14 crc kubenswrapper[4718]: I1124 08:55:14.449585 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9drg5\" (UniqueName: \"kubernetes.io/projected/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-kube-api-access-9drg5\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:14 crc kubenswrapper[4718]: I1124 08:55:14.449596 4718 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94a221c9-1794-4a91-b03a-c42a9bdcfcb9-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:14 crc kubenswrapper[4718]: I1124 08:55:14.952825 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-xs54k" event={"ID":"94a221c9-1794-4a91-b03a-c42a9bdcfcb9","Type":"ContainerDied","Data":"af1ce801dbd4e636ee2c19a06b3b19b7bec8292e600cb263f6b240a02d32548f"} Nov 24 08:55:14 crc kubenswrapper[4718]: I1124 08:55:14.952868 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af1ce801dbd4e636ee2c19a06b3b19b7bec8292e600cb263f6b240a02d32548f" Nov 24 08:55:14 crc kubenswrapper[4718]: I1124 08:55:14.952932 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-xs54k" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.043630 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-external-api-0"] Nov 24 08:55:16 crc kubenswrapper[4718]: E1124 08:55:16.045046 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94a221c9-1794-4a91-b03a-c42a9bdcfcb9" containerName="glance-db-sync" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.045137 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="94a221c9-1794-4a91-b03a-c42a9bdcfcb9" containerName="glance-db-sync" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.045378 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="94a221c9-1794-4a91-b03a-c42a9bdcfcb9" containerName="glance-db-sync" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.046672 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.049728 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-scripts" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.049946 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-sz2tc" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.049954 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-external-config-data" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.071414 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-external-api-0"] Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.172990 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-dev\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.173063 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-etc-nvme\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.173154 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-var-locks-brick\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.173192 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-etc-iscsi\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.173233 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9bb2aba4-18b9-410b-bf51-57714efd5c42-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.173261 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-run\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.173286 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: 
\"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.173317 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bb2aba4-18b9-410b-bf51-57714efd5c42-scripts\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.173353 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-sys\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.173376 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bb2aba4-18b9-410b-bf51-57714efd5c42-config-data\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.173404 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.173435 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-lib-modules\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.173469 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bb2aba4-18b9-410b-bf51-57714efd5c42-logs\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.173507 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwv6q\" (UniqueName: \"kubernetes.io/projected/9bb2aba4-18b9-410b-bf51-57714efd5c42-kube-api-access-fwv6q\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.275146 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-dev\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.275409 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: 
\"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-etc-nvme\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.275512 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-var-locks-brick\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.275605 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-var-locks-brick\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.275263 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-dev\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.275543 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-etc-nvme\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.275809 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-etc-iscsi\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.275870 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9bb2aba4-18b9-410b-bf51-57714efd5c42-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.275900 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-run\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.275936 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.275960 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bb2aba4-18b9-410b-bf51-57714efd5c42-scripts\") pod 
\"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.276007 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-sys\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.276033 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bb2aba4-18b9-410b-bf51-57714efd5c42-config-data\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.276056 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.276092 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-lib-modules\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.276121 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bb2aba4-18b9-410b-bf51-57714efd5c42-logs\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.276163 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwv6q\" (UniqueName: \"kubernetes.io/projected/9bb2aba4-18b9-410b-bf51-57714efd5c42-kube-api-access-fwv6q\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.276508 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-etc-iscsi\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.277060 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9bb2aba4-18b9-410b-bf51-57714efd5c42-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.277095 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-sys\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " 
pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.277105 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-lib-modules\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.277134 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/9bb2aba4-18b9-410b-bf51-57714efd5c42-run\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.277380 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bb2aba4-18b9-410b-bf51-57714efd5c42-logs\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.277452 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") device mount path \"/mnt/openstack/pv10\"" pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.277557 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") device mount path \"/mnt/openstack/pv06\"" pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.283983 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bb2aba4-18b9-410b-bf51-57714efd5c42-config-data\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.294124 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bb2aba4-18b9-410b-bf51-57714efd5c42-scripts\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.304336 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwv6q\" (UniqueName: \"kubernetes.io/projected/9bb2aba4-18b9-410b-bf51-57714efd5c42-kube-api-access-fwv6q\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.307828 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 
08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.315804 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"9bb2aba4-18b9-410b-bf51-57714efd5c42\") " pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.365673 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.558143 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.560186 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.564322 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-internal-config-data" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.588741 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.680393 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-run\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.680437 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.680482 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.680504 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.680529 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.680548 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.680570 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-dev\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.680605 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.680706 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.680770 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-sys\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.680879 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5lq4\" (UniqueName: \"kubernetes.io/projected/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-kube-api-access-f5lq4\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.680929 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.680951 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-logs\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.681030 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782041 4718 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782076 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782096 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782112 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782130 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-dev\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782151 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782171 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782189 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-sys\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782200 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782222 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-f5lq4\" (UniqueName: \"kubernetes.io/projected/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-kube-api-access-f5lq4\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782241 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782257 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-logs\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782275 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782324 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-run\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782343 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782351 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") device mount path \"/mnt/openstack/pv05\"" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782415 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782448 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-dev\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782487 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: 
\"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782859 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.782906 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-sys\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.783208 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.783480 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-logs\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.783508 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-run\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.787434 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.789166 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") device mount path \"/mnt/openstack/pv12\"" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.790061 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.794230 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-external-api-0"] Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.817538 4718 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-f5lq4\" (UniqueName: \"kubernetes.io/projected/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-kube-api-access-f5lq4\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.824283 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.831432 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.885051 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:16 crc kubenswrapper[4718]: I1124 08:55:16.973473 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-external-api-0" event={"ID":"9bb2aba4-18b9-410b-bf51-57714efd5c42","Type":"ContainerStarted","Data":"4865cfb88f24cd6f13553306e0128baeb95a87c77899c1a274c1eb7494f9832a"} Nov 24 08:55:17 crc kubenswrapper[4718]: I1124 08:55:17.184128 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Nov 24 08:55:17 crc kubenswrapper[4718]: I1124 08:55:17.198878 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Nov 24 08:55:17 crc kubenswrapper[4718]: I1124 08:55:17.984019 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc","Type":"ContainerStarted","Data":"d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71"} Nov 24 08:55:17 crc kubenswrapper[4718]: I1124 08:55:17.984567 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc","Type":"ContainerStarted","Data":"8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad"} Nov 24 08:55:17 crc kubenswrapper[4718]: I1124 08:55:17.984105 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-internal-api-0" podUID="a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" containerName="glance-log" containerID="cri-o://57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657" gracePeriod=30 Nov 24 08:55:17 crc kubenswrapper[4718]: I1124 08:55:17.984208 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-internal-api-0" podUID="a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" containerName="glance-httpd" containerID="cri-o://8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad" gracePeriod=30 Nov 24 08:55:17 crc kubenswrapper[4718]: I1124 08:55:17.984738 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" 
event={"ID":"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc","Type":"ContainerStarted","Data":"57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657"} Nov 24 08:55:17 crc kubenswrapper[4718]: I1124 08:55:17.984754 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc","Type":"ContainerStarted","Data":"3b7b5f65acd13549a16b8138585a5a5e95272a546b4843a96bb626de142cc4bb"} Nov 24 08:55:17 crc kubenswrapper[4718]: I1124 08:55:17.984199 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-internal-api-0" podUID="a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" containerName="glance-api" containerID="cri-o://d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71" gracePeriod=30 Nov 24 08:55:17 crc kubenswrapper[4718]: I1124 08:55:17.988936 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-external-api-0" event={"ID":"9bb2aba4-18b9-410b-bf51-57714efd5c42","Type":"ContainerStarted","Data":"4717dfbfc88e4d7ace9b89fe885ed6a75da01b0d3451951cab72a0f1ec58579a"} Nov 24 08:55:17 crc kubenswrapper[4718]: I1124 08:55:17.989086 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-external-api-0" event={"ID":"9bb2aba4-18b9-410b-bf51-57714efd5c42","Type":"ContainerStarted","Data":"5b3e1cb9c6140d6e3c7445f8c6530365e9b622dbb3b0edecc1b7687597ff84a9"} Nov 24 08:55:17 crc kubenswrapper[4718]: I1124 08:55:17.989180 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-external-api-0" event={"ID":"9bb2aba4-18b9-410b-bf51-57714efd5c42","Type":"ContainerStarted","Data":"771770f16f082bc996f297550d6c7d8373f71fa15bf0cb33ae7685e47e872880"} Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.014945 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=3.014927734 podStartE2EDuration="3.014927734s" podCreationTimestamp="2025-11-24 08:55:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:55:18.009655284 +0000 UTC m=+1190.125946188" watchObservedRunningTime="2025-11-24 08:55:18.014927734 +0000 UTC m=+1190.131218628" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.044953 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-external-api-0" podStartSLOduration=2.044932325 podStartE2EDuration="2.044932325s" podCreationTimestamp="2025-11-24 08:55:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:55:18.044228857 +0000 UTC m=+1190.160519771" watchObservedRunningTime="2025-11-24 08:55:18.044932325 +0000 UTC m=+1190.161223229" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.413040 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.511851 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5lq4\" (UniqueName: \"kubernetes.io/projected/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-kube-api-access-f5lq4\") pod \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512203 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-scripts\") pod \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512253 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-logs\") pod \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512286 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-etc-nvme\") pod \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512331 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512362 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512383 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-lib-modules\") pod \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512404 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-httpd-run\") pod \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512440 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-config-data\") pod \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512474 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-etc-iscsi\") pod \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512489 4718 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" (UID: "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512529 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-dev\") pod \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512548 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" (UID: "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512556 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-var-locks-brick\") pod \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512585 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" (UID: "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc"). InnerVolumeSpecName "var-locks-brick". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512620 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-sys\") pod \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512630 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-logs" (OuterVolumeSpecName: "logs") pod "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" (UID: "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512654 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-dev" (OuterVolumeSpecName: "dev") pod "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" (UID: "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc"). InnerVolumeSpecName "dev". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512660 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-run\") pod \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\" (UID: \"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc\") " Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512682 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-run" (OuterVolumeSpecName: "run") pod "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" (UID: "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512684 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-sys" (OuterVolumeSpecName: "sys") pod "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" (UID: "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.513033 4718 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-logs\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.513052 4718 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-lib-modules\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.513065 4718 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-etc-iscsi\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.513077 4718 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-dev\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.513092 4718 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-var-locks-brick\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.513104 4718 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-sys\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.513115 4718 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-run\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.513233 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" (UID: "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.512420 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" (UID: "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.518209 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance-cache") pod "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" (UID: "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.519688 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-scripts" (OuterVolumeSpecName: "scripts") pod "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" (UID: "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.521103 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" (UID: "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.522642 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-kube-api-access-f5lq4" (OuterVolumeSpecName: "kube-api-access-f5lq4") pod "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" (UID: "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc"). InnerVolumeSpecName "kube-api-access-f5lq4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.597789 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-config-data" (OuterVolumeSpecName: "config-data") pod "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" (UID: "a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.614653 4718 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-etc-nvme\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.614747 4718 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.614766 4718 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.614777 4718 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.614788 4718 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.614798 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5lq4\" (UniqueName: \"kubernetes.io/projected/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-kube-api-access-f5lq4\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.614824 4718 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.629300 4718 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.641809 4718 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.716764 4718 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.717358 4718 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.998151 4718 generic.go:334] "Generic (PLEG): container finished" podID="a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" containerID="d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71" exitCode=143 Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.998179 4718 generic.go:334] "Generic (PLEG): container finished" podID="a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" containerID="8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad" exitCode=143 Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.998185 4718 generic.go:334] "Generic (PLEG): container finished" podID="a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" 
containerID="57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657" exitCode=143 Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.998214 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc","Type":"ContainerDied","Data":"d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71"} Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.998234 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.998261 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc","Type":"ContainerDied","Data":"8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad"} Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.998280 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc","Type":"ContainerDied","Data":"57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657"} Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.998293 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc","Type":"ContainerDied","Data":"3b7b5f65acd13549a16b8138585a5a5e95272a546b4843a96bb626de142cc4bb"} Nov 24 08:55:18 crc kubenswrapper[4718]: I1124 08:55:18.998302 4718 scope.go:117] "RemoveContainer" containerID="d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.025759 4718 scope.go:117] "RemoveContainer" containerID="8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.030343 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.041455 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.050983 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Nov 24 08:55:19 crc kubenswrapper[4718]: E1124 08:55:19.051276 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" containerName="glance-httpd" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.051293 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" containerName="glance-httpd" Nov 24 08:55:19 crc kubenswrapper[4718]: E1124 08:55:19.051322 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" containerName="glance-api" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.051328 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" containerName="glance-api" Nov 24 08:55:19 crc kubenswrapper[4718]: E1124 08:55:19.051365 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" containerName="glance-log" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.051372 4718 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" containerName="glance-log" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.051484 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" containerName="glance-httpd" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.051498 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" containerName="glance-api" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.051510 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" containerName="glance-log" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.052456 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.054232 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-internal-config-data" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.054514 4718 scope.go:117] "RemoveContainer" containerID="57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.067383 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.080037 4718 scope.go:117] "RemoveContainer" containerID="d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71" Nov 24 08:55:19 crc kubenswrapper[4718]: E1124 08:55:19.082174 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71\": container with ID starting with d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71 not found: ID does not exist" containerID="d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.082215 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71"} err="failed to get container status \"d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71\": rpc error: code = NotFound desc = could not find container \"d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71\": container with ID starting with d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71 not found: ID does not exist" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.082248 4718 scope.go:117] "RemoveContainer" containerID="8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad" Nov 24 08:55:19 crc kubenswrapper[4718]: E1124 08:55:19.082639 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad\": container with ID starting with 8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad not found: ID does not exist" containerID="8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.082676 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad"} err="failed to get container status 
\"8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad\": rpc error: code = NotFound desc = could not find container \"8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad\": container with ID starting with 8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad not found: ID does not exist" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.082701 4718 scope.go:117] "RemoveContainer" containerID="57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657" Nov 24 08:55:19 crc kubenswrapper[4718]: E1124 08:55:19.083207 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657\": container with ID starting with 57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657 not found: ID does not exist" containerID="57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.083246 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657"} err="failed to get container status \"57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657\": rpc error: code = NotFound desc = could not find container \"57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657\": container with ID starting with 57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657 not found: ID does not exist" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.083301 4718 scope.go:117] "RemoveContainer" containerID="d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.084590 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71"} err="failed to get container status \"d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71\": rpc error: code = NotFound desc = could not find container \"d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71\": container with ID starting with d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71 not found: ID does not exist" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.084617 4718 scope.go:117] "RemoveContainer" containerID="8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.084909 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad"} err="failed to get container status \"8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad\": rpc error: code = NotFound desc = could not find container \"8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad\": container with ID starting with 8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad not found: ID does not exist" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.084935 4718 scope.go:117] "RemoveContainer" containerID="57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.085216 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657"} err="failed to get container status 
\"57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657\": rpc error: code = NotFound desc = could not find container \"57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657\": container with ID starting with 57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657 not found: ID does not exist" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.085248 4718 scope.go:117] "RemoveContainer" containerID="d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.088359 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71"} err="failed to get container status \"d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71\": rpc error: code = NotFound desc = could not find container \"d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71\": container with ID starting with d40d1c5ffc6e78aa37d24b8153cf8942bb4450849da2c08cbf64aba18cadcf71 not found: ID does not exist" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.088387 4718 scope.go:117] "RemoveContainer" containerID="8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.088874 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad"} err="failed to get container status \"8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad\": rpc error: code = NotFound desc = could not find container \"8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad\": container with ID starting with 8909f0baafe036b5d3eea01d689c6d7a55512f9ee0c6bfcd189b2401f60914ad not found: ID does not exist" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.088925 4718 scope.go:117] "RemoveContainer" containerID="57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.091258 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657"} err="failed to get container status \"57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657\": rpc error: code = NotFound desc = could not find container \"57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657\": container with ID starting with 57153fe23d5574d1f797719f1429cfb1b946074a1f4a02f0dded12d88df0a657 not found: ID does not exist" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.128963 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ae8afba-57a2-4856-abc2-923d7f6c609d-logs\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.129063 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0ae8afba-57a2-4856-abc2-923d7f6c609d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.129096 4718 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.129411 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-run\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.129596 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.129718 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.129749 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.129936 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.130362 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ae8afba-57a2-4856-abc2-923d7f6c609d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.130413 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-dev\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.131046 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb8z9\" (UniqueName: \"kubernetes.io/projected/0ae8afba-57a2-4856-abc2-923d7f6c609d-kube-api-access-tb8z9\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " 
pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.131162 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-sys\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.131240 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ae8afba-57a2-4856-abc2-923d7f6c609d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.131309 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.232576 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.232633 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ae8afba-57a2-4856-abc2-923d7f6c609d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.232656 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-dev\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.232680 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb8z9\" (UniqueName: \"kubernetes.io/projected/0ae8afba-57a2-4856-abc2-923d7f6c609d-kube-api-access-tb8z9\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.232704 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-sys\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.232725 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ae8afba-57a2-4856-abc2-923d7f6c609d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " 
pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.232754 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.232768 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ae8afba-57a2-4856-abc2-923d7f6c609d-logs\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.232787 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.232803 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0ae8afba-57a2-4856-abc2-923d7f6c609d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.232825 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-run\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.232845 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.232875 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.232892 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.233152 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") device mount path \"/mnt/openstack/pv12\"" 
pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.233395 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-sys\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.233535 4718 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") device mount path \"/mnt/openstack/pv05\"" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.233402 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-dev\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.234204 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0ae8afba-57a2-4856-abc2-923d7f6c609d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.234409 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ae8afba-57a2-4856-abc2-923d7f6c609d-logs\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.234403 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.234455 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.234483 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-run\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.234512 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 
08:55:19.237691 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/0ae8afba-57a2-4856-abc2-923d7f6c609d-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.239306 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ae8afba-57a2-4856-abc2-923d7f6c609d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.239652 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ae8afba-57a2-4856-abc2-923d7f6c609d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.250592 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb8z9\" (UniqueName: \"kubernetes.io/projected/0ae8afba-57a2-4856-abc2-923d7f6c609d-kube-api-access-tb8z9\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.253242 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.256809 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"0ae8afba-57a2-4856-abc2-923d7f6c609d\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.367758 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:19 crc kubenswrapper[4718]: I1124 08:55:19.655352 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Nov 24 08:55:19 crc kubenswrapper[4718]: W1124 08:55:19.665098 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ae8afba_57a2_4856_abc2_923d7f6c609d.slice/crio-bb00e815c4798ec64d9a3923b4979a1ea2d983d7624f794d46702e570e1ec07c WatchSource:0}: Error finding container bb00e815c4798ec64d9a3923b4979a1ea2d983d7624f794d46702e570e1ec07c: Status 404 returned error can't find the container with id bb00e815c4798ec64d9a3923b4979a1ea2d983d7624f794d46702e570e1ec07c Nov 24 08:55:20 crc kubenswrapper[4718]: I1124 08:55:20.007626 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"0ae8afba-57a2-4856-abc2-923d7f6c609d","Type":"ContainerStarted","Data":"ab7b97698d4f5b5d05c64db1a18c49a9aaf18524744a1bcd3d71a1d7dacbe660"} Nov 24 08:55:20 crc kubenswrapper[4718]: I1124 08:55:20.008194 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"0ae8afba-57a2-4856-abc2-923d7f6c609d","Type":"ContainerStarted","Data":"6dc8acedb894135e785883a2257727b8aa9c9e857b10c7146296b08f3cf56705"} Nov 24 08:55:20 crc kubenswrapper[4718]: I1124 08:55:20.008208 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"0ae8afba-57a2-4856-abc2-923d7f6c609d","Type":"ContainerStarted","Data":"bb00e815c4798ec64d9a3923b4979a1ea2d983d7624f794d46702e570e1ec07c"} Nov 24 08:55:20 crc kubenswrapper[4718]: I1124 08:55:20.606287 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc" path="/var/lib/kubelet/pods/a7cb45d0-edb5-41f3-bf0d-b3adc02f17dc/volumes" Nov 24 08:55:21 crc kubenswrapper[4718]: I1124 08:55:21.018435 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"0ae8afba-57a2-4856-abc2-923d7f6c609d","Type":"ContainerStarted","Data":"ca2d105f6f9739e814e29b0bb298b437f44929800aa0a58831ffccda4402305b"} Nov 24 08:55:26 crc kubenswrapper[4718]: I1124 08:55:26.366704 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:26 crc kubenswrapper[4718]: I1124 08:55:26.367357 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:26 crc kubenswrapper[4718]: I1124 08:55:26.367374 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:26 crc kubenswrapper[4718]: I1124 08:55:26.393341 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:26 crc kubenswrapper[4718]: I1124 08:55:26.393403 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:26 crc kubenswrapper[4718]: I1124 08:55:26.409980 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:26 crc kubenswrapper[4718]: I1124 
08:55:26.420181 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=7.420102167 podStartE2EDuration="7.420102167s" podCreationTimestamp="2025-11-24 08:55:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 08:55:21.047538903 +0000 UTC m=+1193.163829807" watchObservedRunningTime="2025-11-24 08:55:26.420102167 +0000 UTC m=+1198.536393071" Nov 24 08:55:27 crc kubenswrapper[4718]: I1124 08:55:27.065693 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:27 crc kubenswrapper[4718]: I1124 08:55:27.066028 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:27 crc kubenswrapper[4718]: I1124 08:55:27.066040 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:27 crc kubenswrapper[4718]: I1124 08:55:27.077607 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:27 crc kubenswrapper[4718]: I1124 08:55:27.078315 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:27 crc kubenswrapper[4718]: I1124 08:55:27.078966 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-external-api-0" Nov 24 08:55:29 crc kubenswrapper[4718]: I1124 08:55:29.063893 4718 scope.go:117] "RemoveContainer" containerID="f96ed44d93c7607307f482bffc86f62f51ccd2cbb46fb3cc71032863d0102fce" Nov 24 08:55:29 crc kubenswrapper[4718]: I1124 08:55:29.102333 4718 scope.go:117] "RemoveContainer" containerID="39d3d3b56c6b2db2f4d0deb8e1fbc3c693b25320430fbd762c9c0d044bb34321" Nov 24 08:55:29 crc kubenswrapper[4718]: I1124 08:55:29.124313 4718 scope.go:117] "RemoveContainer" containerID="9f1b87e951d0566d5276682b0f880b53153b1bdd6cdbc7326c8bad01cf97fa7d" Nov 24 08:55:29 crc kubenswrapper[4718]: I1124 08:55:29.368358 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:29 crc kubenswrapper[4718]: I1124 08:55:29.368591 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:29 crc kubenswrapper[4718]: I1124 08:55:29.368871 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:29 crc kubenswrapper[4718]: I1124 08:55:29.405938 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:29 crc kubenswrapper[4718]: I1124 08:55:29.409120 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:29 crc kubenswrapper[4718]: I1124 08:55:29.412612 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:30 crc kubenswrapper[4718]: I1124 08:55:30.104760 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-internal-api-0" 
Nov 24 08:55:30 crc kubenswrapper[4718]: I1124 08:55:30.104804 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:30 crc kubenswrapper[4718]: I1124 08:55:30.104814 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:30 crc kubenswrapper[4718]: I1124 08:55:30.119310 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:30 crc kubenswrapper[4718]: I1124 08:55:30.119376 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:55:30 crc kubenswrapper[4718]: I1124 08:55:30.120173 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-internal-api-0" Nov 24 08:56:52 crc kubenswrapper[4718]: I1124 08:56:52.045096 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:56:52 crc kubenswrapper[4718]: I1124 08:56:52.045675 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:57:22 crc kubenswrapper[4718]: I1124 08:57:22.044830 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:57:22 crc kubenswrapper[4718]: I1124 08:57:22.045405 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:57:52 crc kubenswrapper[4718]: I1124 08:57:52.045188 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:57:52 crc kubenswrapper[4718]: I1124 08:57:52.045758 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:57:52 crc kubenswrapper[4718]: I1124 08:57:52.045802 4718 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 08:57:52 crc kubenswrapper[4718]: I1124 08:57:52.046533 4718 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"05b60275224e52d49ea67f65feb883fbf9bd70bf35c0916e6ca86fad0a473f7d"} pod="openshift-machine-config-operator/machine-config-daemon-575gl" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 08:57:52 crc kubenswrapper[4718]: I1124 08:57:52.046601 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" containerID="cri-o://05b60275224e52d49ea67f65feb883fbf9bd70bf35c0916e6ca86fad0a473f7d" gracePeriod=600 Nov 24 08:57:52 crc kubenswrapper[4718]: I1124 08:57:52.284092 4718 generic.go:334] "Generic (PLEG): container finished" podID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerID="05b60275224e52d49ea67f65feb883fbf9bd70bf35c0916e6ca86fad0a473f7d" exitCode=0 Nov 24 08:57:52 crc kubenswrapper[4718]: I1124 08:57:52.284157 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerDied","Data":"05b60275224e52d49ea67f65feb883fbf9bd70bf35c0916e6ca86fad0a473f7d"} Nov 24 08:57:52 crc kubenswrapper[4718]: I1124 08:57:52.284515 4718 scope.go:117] "RemoveContainer" containerID="aa05c9aab5bce7122c842c494e5738a78a924285d3cae2d6dd4b40d0c97d9b86" Nov 24 08:57:53 crc kubenswrapper[4718]: I1124 08:57:53.294965 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerStarted","Data":"aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f"} Nov 24 08:59:23 crc kubenswrapper[4718]: I1124 08:59:23.867953 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-x69mk"] Nov 24 08:59:23 crc kubenswrapper[4718]: I1124 08:59:23.873461 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x69mk" Nov 24 08:59:23 crc kubenswrapper[4718]: I1124 08:59:23.884587 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x69mk"] Nov 24 08:59:23 crc kubenswrapper[4718]: I1124 08:59:23.966023 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1878b668-ef23-4e07-a0a5-316891c5e132-utilities\") pod \"community-operators-x69mk\" (UID: \"1878b668-ef23-4e07-a0a5-316891c5e132\") " pod="openshift-marketplace/community-operators-x69mk" Nov 24 08:59:23 crc kubenswrapper[4718]: I1124 08:59:23.966091 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhc7t\" (UniqueName: \"kubernetes.io/projected/1878b668-ef23-4e07-a0a5-316891c5e132-kube-api-access-dhc7t\") pod \"community-operators-x69mk\" (UID: \"1878b668-ef23-4e07-a0a5-316891c5e132\") " pod="openshift-marketplace/community-operators-x69mk" Nov 24 08:59:23 crc kubenswrapper[4718]: I1124 08:59:23.966181 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1878b668-ef23-4e07-a0a5-316891c5e132-catalog-content\") pod \"community-operators-x69mk\" (UID: \"1878b668-ef23-4e07-a0a5-316891c5e132\") " pod="openshift-marketplace/community-operators-x69mk" Nov 24 08:59:24 crc kubenswrapper[4718]: I1124 08:59:24.067632 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1878b668-ef23-4e07-a0a5-316891c5e132-utilities\") pod \"community-operators-x69mk\" (UID: \"1878b668-ef23-4e07-a0a5-316891c5e132\") " pod="openshift-marketplace/community-operators-x69mk" Nov 24 08:59:24 crc kubenswrapper[4718]: I1124 08:59:24.067696 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhc7t\" (UniqueName: \"kubernetes.io/projected/1878b668-ef23-4e07-a0a5-316891c5e132-kube-api-access-dhc7t\") pod \"community-operators-x69mk\" (UID: \"1878b668-ef23-4e07-a0a5-316891c5e132\") " pod="openshift-marketplace/community-operators-x69mk" Nov 24 08:59:24 crc kubenswrapper[4718]: I1124 08:59:24.067766 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1878b668-ef23-4e07-a0a5-316891c5e132-catalog-content\") pod \"community-operators-x69mk\" (UID: \"1878b668-ef23-4e07-a0a5-316891c5e132\") " pod="openshift-marketplace/community-operators-x69mk" Nov 24 08:59:24 crc kubenswrapper[4718]: I1124 08:59:24.068309 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1878b668-ef23-4e07-a0a5-316891c5e132-catalog-content\") pod \"community-operators-x69mk\" (UID: \"1878b668-ef23-4e07-a0a5-316891c5e132\") " pod="openshift-marketplace/community-operators-x69mk" Nov 24 08:59:24 crc kubenswrapper[4718]: I1124 08:59:24.068549 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1878b668-ef23-4e07-a0a5-316891c5e132-utilities\") pod \"community-operators-x69mk\" (UID: \"1878b668-ef23-4e07-a0a5-316891c5e132\") " pod="openshift-marketplace/community-operators-x69mk" Nov 24 08:59:24 crc kubenswrapper[4718]: I1124 08:59:24.089761 4718 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dhc7t\" (UniqueName: \"kubernetes.io/projected/1878b668-ef23-4e07-a0a5-316891c5e132-kube-api-access-dhc7t\") pod \"community-operators-x69mk\" (UID: \"1878b668-ef23-4e07-a0a5-316891c5e132\") " pod="openshift-marketplace/community-operators-x69mk" Nov 24 08:59:24 crc kubenswrapper[4718]: I1124 08:59:24.193743 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x69mk" Nov 24 08:59:24 crc kubenswrapper[4718]: I1124 08:59:24.508385 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x69mk"] Nov 24 08:59:24 crc kubenswrapper[4718]: I1124 08:59:24.929902 4718 generic.go:334] "Generic (PLEG): container finished" podID="1878b668-ef23-4e07-a0a5-316891c5e132" containerID="d7ce0eeee4a1905514a4c6768757fcbf196b7a03aad374cdba623733e6d8ea4c" exitCode=0 Nov 24 08:59:24 crc kubenswrapper[4718]: I1124 08:59:24.930102 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x69mk" event={"ID":"1878b668-ef23-4e07-a0a5-316891c5e132","Type":"ContainerDied","Data":"d7ce0eeee4a1905514a4c6768757fcbf196b7a03aad374cdba623733e6d8ea4c"} Nov 24 08:59:24 crc kubenswrapper[4718]: I1124 08:59:24.930233 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x69mk" event={"ID":"1878b668-ef23-4e07-a0a5-316891c5e132","Type":"ContainerStarted","Data":"36dd17b48a59f66a4546d0957b50a7400156bdc8d4ae360ee1986a4c91b433d0"} Nov 24 08:59:24 crc kubenswrapper[4718]: I1124 08:59:24.932997 4718 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 08:59:25 crc kubenswrapper[4718]: I1124 08:59:25.951426 4718 generic.go:334] "Generic (PLEG): container finished" podID="1878b668-ef23-4e07-a0a5-316891c5e132" containerID="75f3c51f5245499de4866a7467c3d947d94463f0eef194aeb34374a5b466aa8d" exitCode=0 Nov 24 08:59:25 crc kubenswrapper[4718]: I1124 08:59:25.951472 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x69mk" event={"ID":"1878b668-ef23-4e07-a0a5-316891c5e132","Type":"ContainerDied","Data":"75f3c51f5245499de4866a7467c3d947d94463f0eef194aeb34374a5b466aa8d"} Nov 24 08:59:26 crc kubenswrapper[4718]: I1124 08:59:26.962271 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x69mk" event={"ID":"1878b668-ef23-4e07-a0a5-316891c5e132","Type":"ContainerStarted","Data":"a800f17d05462906f78bc93daff0e7290873cf4e6f225f8d88a9091c2dbb12b1"} Nov 24 08:59:26 crc kubenswrapper[4718]: I1124 08:59:26.986908 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-x69mk" podStartSLOduration=2.566850815 podStartE2EDuration="3.986879068s" podCreationTimestamp="2025-11-24 08:59:23 +0000 UTC" firstStartedPulling="2025-11-24 08:59:24.932636252 +0000 UTC m=+1437.048927156" lastFinishedPulling="2025-11-24 08:59:26.352664505 +0000 UTC m=+1438.468955409" observedRunningTime="2025-11-24 08:59:26.984286535 +0000 UTC m=+1439.100577439" watchObservedRunningTime="2025-11-24 08:59:26.986879068 +0000 UTC m=+1439.103169972" Nov 24 08:59:29 crc kubenswrapper[4718]: I1124 08:59:29.358810 4718 scope.go:117] "RemoveContainer" containerID="9921b7b3f742da7b4785f5794c97d3f3d65aa89a2c464aa055237506fc49da01" Nov 24 08:59:29 crc kubenswrapper[4718]: I1124 08:59:29.403848 4718 scope.go:117] "RemoveContainer" 
containerID="abd276dcf11f3eb6dd7bbb9bc3b1b05bc15c6440b64eef5ed13117d57906df69" Nov 24 08:59:29 crc kubenswrapper[4718]: I1124 08:59:29.419682 4718 scope.go:117] "RemoveContainer" containerID="0f45731e8cd593fda95477aa9f0262cb9e5bda6cb963b256065af6b3b8a81fff" Nov 24 08:59:34 crc kubenswrapper[4718]: I1124 08:59:34.193912 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-x69mk" Nov 24 08:59:34 crc kubenswrapper[4718]: I1124 08:59:34.194416 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-x69mk" Nov 24 08:59:34 crc kubenswrapper[4718]: I1124 08:59:34.244994 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-x69mk" Nov 24 08:59:35 crc kubenswrapper[4718]: I1124 08:59:35.075519 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-x69mk" Nov 24 08:59:35 crc kubenswrapper[4718]: I1124 08:59:35.118324 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x69mk"] Nov 24 08:59:37 crc kubenswrapper[4718]: I1124 08:59:37.036704 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-x69mk" podUID="1878b668-ef23-4e07-a0a5-316891c5e132" containerName="registry-server" containerID="cri-o://a800f17d05462906f78bc93daff0e7290873cf4e6f225f8d88a9091c2dbb12b1" gracePeriod=2 Nov 24 08:59:37 crc kubenswrapper[4718]: I1124 08:59:37.918400 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x69mk" Nov 24 08:59:37 crc kubenswrapper[4718]: I1124 08:59:37.981521 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhc7t\" (UniqueName: \"kubernetes.io/projected/1878b668-ef23-4e07-a0a5-316891c5e132-kube-api-access-dhc7t\") pod \"1878b668-ef23-4e07-a0a5-316891c5e132\" (UID: \"1878b668-ef23-4e07-a0a5-316891c5e132\") " Nov 24 08:59:37 crc kubenswrapper[4718]: I1124 08:59:37.981685 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1878b668-ef23-4e07-a0a5-316891c5e132-utilities\") pod \"1878b668-ef23-4e07-a0a5-316891c5e132\" (UID: \"1878b668-ef23-4e07-a0a5-316891c5e132\") " Nov 24 08:59:37 crc kubenswrapper[4718]: I1124 08:59:37.981739 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1878b668-ef23-4e07-a0a5-316891c5e132-catalog-content\") pod \"1878b668-ef23-4e07-a0a5-316891c5e132\" (UID: \"1878b668-ef23-4e07-a0a5-316891c5e132\") " Nov 24 08:59:37 crc kubenswrapper[4718]: I1124 08:59:37.982657 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1878b668-ef23-4e07-a0a5-316891c5e132-utilities" (OuterVolumeSpecName: "utilities") pod "1878b668-ef23-4e07-a0a5-316891c5e132" (UID: "1878b668-ef23-4e07-a0a5-316891c5e132"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:59:37 crc kubenswrapper[4718]: I1124 08:59:37.990775 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1878b668-ef23-4e07-a0a5-316891c5e132-kube-api-access-dhc7t" (OuterVolumeSpecName: "kube-api-access-dhc7t") pod "1878b668-ef23-4e07-a0a5-316891c5e132" (UID: "1878b668-ef23-4e07-a0a5-316891c5e132"). InnerVolumeSpecName "kube-api-access-dhc7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.034701 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1878b668-ef23-4e07-a0a5-316891c5e132-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1878b668-ef23-4e07-a0a5-316891c5e132" (UID: "1878b668-ef23-4e07-a0a5-316891c5e132"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.045737 4718 generic.go:334] "Generic (PLEG): container finished" podID="1878b668-ef23-4e07-a0a5-316891c5e132" containerID="a800f17d05462906f78bc93daff0e7290873cf4e6f225f8d88a9091c2dbb12b1" exitCode=0 Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.045781 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x69mk" event={"ID":"1878b668-ef23-4e07-a0a5-316891c5e132","Type":"ContainerDied","Data":"a800f17d05462906f78bc93daff0e7290873cf4e6f225f8d88a9091c2dbb12b1"} Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.045808 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x69mk" event={"ID":"1878b668-ef23-4e07-a0a5-316891c5e132","Type":"ContainerDied","Data":"36dd17b48a59f66a4546d0957b50a7400156bdc8d4ae360ee1986a4c91b433d0"} Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.045826 4718 scope.go:117] "RemoveContainer" containerID="a800f17d05462906f78bc93daff0e7290873cf4e6f225f8d88a9091c2dbb12b1" Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.045938 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x69mk" Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.066146 4718 scope.go:117] "RemoveContainer" containerID="75f3c51f5245499de4866a7467c3d947d94463f0eef194aeb34374a5b466aa8d" Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.073933 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x69mk"] Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.084279 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-x69mk"] Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.086209 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1878b668-ef23-4e07-a0a5-316891c5e132-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.086241 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1878b668-ef23-4e07-a0a5-316891c5e132-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.086251 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhc7t\" (UniqueName: \"kubernetes.io/projected/1878b668-ef23-4e07-a0a5-316891c5e132-kube-api-access-dhc7t\") on node \"crc\" DevicePath \"\"" Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.102510 4718 scope.go:117] "RemoveContainer" containerID="d7ce0eeee4a1905514a4c6768757fcbf196b7a03aad374cdba623733e6d8ea4c" Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.119180 4718 scope.go:117] "RemoveContainer" containerID="a800f17d05462906f78bc93daff0e7290873cf4e6f225f8d88a9091c2dbb12b1" Nov 24 08:59:38 crc kubenswrapper[4718]: E1124 08:59:38.119626 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a800f17d05462906f78bc93daff0e7290873cf4e6f225f8d88a9091c2dbb12b1\": container with ID starting with a800f17d05462906f78bc93daff0e7290873cf4e6f225f8d88a9091c2dbb12b1 not found: ID does not exist" containerID="a800f17d05462906f78bc93daff0e7290873cf4e6f225f8d88a9091c2dbb12b1" Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.119704 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a800f17d05462906f78bc93daff0e7290873cf4e6f225f8d88a9091c2dbb12b1"} err="failed to get container status \"a800f17d05462906f78bc93daff0e7290873cf4e6f225f8d88a9091c2dbb12b1\": rpc error: code = NotFound desc = could not find container \"a800f17d05462906f78bc93daff0e7290873cf4e6f225f8d88a9091c2dbb12b1\": container with ID starting with a800f17d05462906f78bc93daff0e7290873cf4e6f225f8d88a9091c2dbb12b1 not found: ID does not exist" Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.119740 4718 scope.go:117] "RemoveContainer" containerID="75f3c51f5245499de4866a7467c3d947d94463f0eef194aeb34374a5b466aa8d" Nov 24 08:59:38 crc kubenswrapper[4718]: E1124 08:59:38.120249 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75f3c51f5245499de4866a7467c3d947d94463f0eef194aeb34374a5b466aa8d\": container with ID starting with 75f3c51f5245499de4866a7467c3d947d94463f0eef194aeb34374a5b466aa8d not found: ID does not exist" containerID="75f3c51f5245499de4866a7467c3d947d94463f0eef194aeb34374a5b466aa8d" Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.120295 4718 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75f3c51f5245499de4866a7467c3d947d94463f0eef194aeb34374a5b466aa8d"} err="failed to get container status \"75f3c51f5245499de4866a7467c3d947d94463f0eef194aeb34374a5b466aa8d\": rpc error: code = NotFound desc = could not find container \"75f3c51f5245499de4866a7467c3d947d94463f0eef194aeb34374a5b466aa8d\": container with ID starting with 75f3c51f5245499de4866a7467c3d947d94463f0eef194aeb34374a5b466aa8d not found: ID does not exist" Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.120321 4718 scope.go:117] "RemoveContainer" containerID="d7ce0eeee4a1905514a4c6768757fcbf196b7a03aad374cdba623733e6d8ea4c" Nov 24 08:59:38 crc kubenswrapper[4718]: E1124 08:59:38.120706 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7ce0eeee4a1905514a4c6768757fcbf196b7a03aad374cdba623733e6d8ea4c\": container with ID starting with d7ce0eeee4a1905514a4c6768757fcbf196b7a03aad374cdba623733e6d8ea4c not found: ID does not exist" containerID="d7ce0eeee4a1905514a4c6768757fcbf196b7a03aad374cdba623733e6d8ea4c" Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.120746 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7ce0eeee4a1905514a4c6768757fcbf196b7a03aad374cdba623733e6d8ea4c"} err="failed to get container status \"d7ce0eeee4a1905514a4c6768757fcbf196b7a03aad374cdba623733e6d8ea4c\": rpc error: code = NotFound desc = could not find container \"d7ce0eeee4a1905514a4c6768757fcbf196b7a03aad374cdba623733e6d8ea4c\": container with ID starting with d7ce0eeee4a1905514a4c6768757fcbf196b7a03aad374cdba623733e6d8ea4c not found: ID does not exist" Nov 24 08:59:38 crc kubenswrapper[4718]: I1124 08:59:38.608345 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1878b668-ef23-4e07-a0a5-316891c5e132" path="/var/lib/kubelet/pods/1878b668-ef23-4e07-a0a5-316891c5e132/volumes" Nov 24 08:59:49 crc kubenswrapper[4718]: I1124 08:59:49.533859 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bcm9p"] Nov 24 08:59:49 crc kubenswrapper[4718]: E1124 08:59:49.534844 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1878b668-ef23-4e07-a0a5-316891c5e132" containerName="extract-utilities" Nov 24 08:59:49 crc kubenswrapper[4718]: I1124 08:59:49.534862 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="1878b668-ef23-4e07-a0a5-316891c5e132" containerName="extract-utilities" Nov 24 08:59:49 crc kubenswrapper[4718]: E1124 08:59:49.534895 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1878b668-ef23-4e07-a0a5-316891c5e132" containerName="registry-server" Nov 24 08:59:49 crc kubenswrapper[4718]: I1124 08:59:49.534903 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="1878b668-ef23-4e07-a0a5-316891c5e132" containerName="registry-server" Nov 24 08:59:49 crc kubenswrapper[4718]: E1124 08:59:49.534917 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1878b668-ef23-4e07-a0a5-316891c5e132" containerName="extract-content" Nov 24 08:59:49 crc kubenswrapper[4718]: I1124 08:59:49.534926 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="1878b668-ef23-4e07-a0a5-316891c5e132" containerName="extract-content" Nov 24 08:59:49 crc kubenswrapper[4718]: I1124 08:59:49.535445 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="1878b668-ef23-4e07-a0a5-316891c5e132" 
containerName="registry-server" Nov 24 08:59:49 crc kubenswrapper[4718]: I1124 08:59:49.536997 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bcm9p" Nov 24 08:59:49 crc kubenswrapper[4718]: I1124 08:59:49.542381 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bcm9p"] Nov 24 08:59:49 crc kubenswrapper[4718]: I1124 08:59:49.648357 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abd92df6-0726-441b-b9cd-2bd43343ba0e-utilities\") pod \"redhat-operators-bcm9p\" (UID: \"abd92df6-0726-441b-b9cd-2bd43343ba0e\") " pod="openshift-marketplace/redhat-operators-bcm9p" Nov 24 08:59:49 crc kubenswrapper[4718]: I1124 08:59:49.648440 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abd92df6-0726-441b-b9cd-2bd43343ba0e-catalog-content\") pod \"redhat-operators-bcm9p\" (UID: \"abd92df6-0726-441b-b9cd-2bd43343ba0e\") " pod="openshift-marketplace/redhat-operators-bcm9p" Nov 24 08:59:49 crc kubenswrapper[4718]: I1124 08:59:49.649673 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzpws\" (UniqueName: \"kubernetes.io/projected/abd92df6-0726-441b-b9cd-2bd43343ba0e-kube-api-access-dzpws\") pod \"redhat-operators-bcm9p\" (UID: \"abd92df6-0726-441b-b9cd-2bd43343ba0e\") " pod="openshift-marketplace/redhat-operators-bcm9p" Nov 24 08:59:49 crc kubenswrapper[4718]: I1124 08:59:49.750804 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abd92df6-0726-441b-b9cd-2bd43343ba0e-utilities\") pod \"redhat-operators-bcm9p\" (UID: \"abd92df6-0726-441b-b9cd-2bd43343ba0e\") " pod="openshift-marketplace/redhat-operators-bcm9p" Nov 24 08:59:49 crc kubenswrapper[4718]: I1124 08:59:49.750848 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abd92df6-0726-441b-b9cd-2bd43343ba0e-catalog-content\") pod \"redhat-operators-bcm9p\" (UID: \"abd92df6-0726-441b-b9cd-2bd43343ba0e\") " pod="openshift-marketplace/redhat-operators-bcm9p" Nov 24 08:59:49 crc kubenswrapper[4718]: I1124 08:59:49.750895 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzpws\" (UniqueName: \"kubernetes.io/projected/abd92df6-0726-441b-b9cd-2bd43343ba0e-kube-api-access-dzpws\") pod \"redhat-operators-bcm9p\" (UID: \"abd92df6-0726-441b-b9cd-2bd43343ba0e\") " pod="openshift-marketplace/redhat-operators-bcm9p" Nov 24 08:59:49 crc kubenswrapper[4718]: I1124 08:59:49.751415 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abd92df6-0726-441b-b9cd-2bd43343ba0e-utilities\") pod \"redhat-operators-bcm9p\" (UID: \"abd92df6-0726-441b-b9cd-2bd43343ba0e\") " pod="openshift-marketplace/redhat-operators-bcm9p" Nov 24 08:59:49 crc kubenswrapper[4718]: I1124 08:59:49.751480 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abd92df6-0726-441b-b9cd-2bd43343ba0e-catalog-content\") pod \"redhat-operators-bcm9p\" (UID: \"abd92df6-0726-441b-b9cd-2bd43343ba0e\") " pod="openshift-marketplace/redhat-operators-bcm9p" Nov 
24 08:59:49 crc kubenswrapper[4718]: I1124 08:59:49.795990 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzpws\" (UniqueName: \"kubernetes.io/projected/abd92df6-0726-441b-b9cd-2bd43343ba0e-kube-api-access-dzpws\") pod \"redhat-operators-bcm9p\" (UID: \"abd92df6-0726-441b-b9cd-2bd43343ba0e\") " pod="openshift-marketplace/redhat-operators-bcm9p" Nov 24 08:59:49 crc kubenswrapper[4718]: I1124 08:59:49.856373 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bcm9p" Nov 24 08:59:50 crc kubenswrapper[4718]: I1124 08:59:50.277017 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bcm9p"] Nov 24 08:59:51 crc kubenswrapper[4718]: I1124 08:59:51.142568 4718 generic.go:334] "Generic (PLEG): container finished" podID="abd92df6-0726-441b-b9cd-2bd43343ba0e" containerID="e79cc6af8be8fafe897ed1afeefabe9001e35d473b5752d9530c08743b4b749d" exitCode=0 Nov 24 08:59:51 crc kubenswrapper[4718]: I1124 08:59:51.142701 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcm9p" event={"ID":"abd92df6-0726-441b-b9cd-2bd43343ba0e","Type":"ContainerDied","Data":"e79cc6af8be8fafe897ed1afeefabe9001e35d473b5752d9530c08743b4b749d"} Nov 24 08:59:51 crc kubenswrapper[4718]: I1124 08:59:51.142901 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcm9p" event={"ID":"abd92df6-0726-441b-b9cd-2bd43343ba0e","Type":"ContainerStarted","Data":"ba6adb19b6a42fb18637fcd0a14d5a81dfc0d37de830cab4e5a4c2e62e52f4a1"} Nov 24 08:59:52 crc kubenswrapper[4718]: I1124 08:59:52.045273 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 08:59:52 crc kubenswrapper[4718]: I1124 08:59:52.045641 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 08:59:52 crc kubenswrapper[4718]: I1124 08:59:52.151153 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcm9p" event={"ID":"abd92df6-0726-441b-b9cd-2bd43343ba0e","Type":"ContainerStarted","Data":"e06063edfdcdbdec8be5642064198ae35158cd9014ce01afd431dda091c74104"} Nov 24 08:59:53 crc kubenswrapper[4718]: I1124 08:59:53.160566 4718 generic.go:334] "Generic (PLEG): container finished" podID="abd92df6-0726-441b-b9cd-2bd43343ba0e" containerID="e06063edfdcdbdec8be5642064198ae35158cd9014ce01afd431dda091c74104" exitCode=0 Nov 24 08:59:53 crc kubenswrapper[4718]: I1124 08:59:53.160841 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcm9p" event={"ID":"abd92df6-0726-441b-b9cd-2bd43343ba0e","Type":"ContainerDied","Data":"e06063edfdcdbdec8be5642064198ae35158cd9014ce01afd431dda091c74104"} Nov 24 08:59:53 crc kubenswrapper[4718]: I1124 08:59:53.317900 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-l6mmf"] Nov 24 08:59:53 crc kubenswrapper[4718]: I1124 08:59:53.319307 4718 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l6mmf" Nov 24 08:59:53 crc kubenswrapper[4718]: I1124 08:59:53.335186 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l6mmf"] Nov 24 08:59:53 crc kubenswrapper[4718]: I1124 08:59:53.412330 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0b5661f-b192-4105-8650-855923df4c2c-utilities\") pod \"certified-operators-l6mmf\" (UID: \"b0b5661f-b192-4105-8650-855923df4c2c\") " pod="openshift-marketplace/certified-operators-l6mmf" Nov 24 08:59:53 crc kubenswrapper[4718]: I1124 08:59:53.412431 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9t4b\" (UniqueName: \"kubernetes.io/projected/b0b5661f-b192-4105-8650-855923df4c2c-kube-api-access-v9t4b\") pod \"certified-operators-l6mmf\" (UID: \"b0b5661f-b192-4105-8650-855923df4c2c\") " pod="openshift-marketplace/certified-operators-l6mmf" Nov 24 08:59:53 crc kubenswrapper[4718]: I1124 08:59:53.412482 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0b5661f-b192-4105-8650-855923df4c2c-catalog-content\") pod \"certified-operators-l6mmf\" (UID: \"b0b5661f-b192-4105-8650-855923df4c2c\") " pod="openshift-marketplace/certified-operators-l6mmf" Nov 24 08:59:53 crc kubenswrapper[4718]: I1124 08:59:53.513422 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0b5661f-b192-4105-8650-855923df4c2c-catalog-content\") pod \"certified-operators-l6mmf\" (UID: \"b0b5661f-b192-4105-8650-855923df4c2c\") " pod="openshift-marketplace/certified-operators-l6mmf" Nov 24 08:59:53 crc kubenswrapper[4718]: I1124 08:59:53.513492 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0b5661f-b192-4105-8650-855923df4c2c-utilities\") pod \"certified-operators-l6mmf\" (UID: \"b0b5661f-b192-4105-8650-855923df4c2c\") " pod="openshift-marketplace/certified-operators-l6mmf" Nov 24 08:59:53 crc kubenswrapper[4718]: I1124 08:59:53.513563 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9t4b\" (UniqueName: \"kubernetes.io/projected/b0b5661f-b192-4105-8650-855923df4c2c-kube-api-access-v9t4b\") pod \"certified-operators-l6mmf\" (UID: \"b0b5661f-b192-4105-8650-855923df4c2c\") " pod="openshift-marketplace/certified-operators-l6mmf" Nov 24 08:59:53 crc kubenswrapper[4718]: I1124 08:59:53.513882 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0b5661f-b192-4105-8650-855923df4c2c-catalog-content\") pod \"certified-operators-l6mmf\" (UID: \"b0b5661f-b192-4105-8650-855923df4c2c\") " pod="openshift-marketplace/certified-operators-l6mmf" Nov 24 08:59:53 crc kubenswrapper[4718]: I1124 08:59:53.514121 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0b5661f-b192-4105-8650-855923df4c2c-utilities\") pod \"certified-operators-l6mmf\" (UID: \"b0b5661f-b192-4105-8650-855923df4c2c\") " pod="openshift-marketplace/certified-operators-l6mmf" Nov 24 08:59:53 crc kubenswrapper[4718]: I1124 08:59:53.537346 4718 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9t4b\" (UniqueName: \"kubernetes.io/projected/b0b5661f-b192-4105-8650-855923df4c2c-kube-api-access-v9t4b\") pod \"certified-operators-l6mmf\" (UID: \"b0b5661f-b192-4105-8650-855923df4c2c\") " pod="openshift-marketplace/certified-operators-l6mmf" Nov 24 08:59:53 crc kubenswrapper[4718]: I1124 08:59:53.636911 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l6mmf" Nov 24 08:59:54 crc kubenswrapper[4718]: I1124 08:59:54.130381 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l6mmf"] Nov 24 08:59:54 crc kubenswrapper[4718]: W1124 08:59:54.143228 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0b5661f_b192_4105_8650_855923df4c2c.slice/crio-cf21a25082201a67d3a90f162eff5679aa8a1ac0ac2a80a2d6da6555c25ec581 WatchSource:0}: Error finding container cf21a25082201a67d3a90f162eff5679aa8a1ac0ac2a80a2d6da6555c25ec581: Status 404 returned error can't find the container with id cf21a25082201a67d3a90f162eff5679aa8a1ac0ac2a80a2d6da6555c25ec581 Nov 24 08:59:54 crc kubenswrapper[4718]: I1124 08:59:54.169619 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l6mmf" event={"ID":"b0b5661f-b192-4105-8650-855923df4c2c","Type":"ContainerStarted","Data":"cf21a25082201a67d3a90f162eff5679aa8a1ac0ac2a80a2d6da6555c25ec581"} Nov 24 08:59:54 crc kubenswrapper[4718]: I1124 08:59:54.174788 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcm9p" event={"ID":"abd92df6-0726-441b-b9cd-2bd43343ba0e","Type":"ContainerStarted","Data":"e6f9b33bc76495edff5989b875ba47fa5876cc22fc1c24c3d9df77edd0c88e22"} Nov 24 08:59:54 crc kubenswrapper[4718]: I1124 08:59:54.205011 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bcm9p" podStartSLOduration=2.57275773 podStartE2EDuration="5.204985156s" podCreationTimestamp="2025-11-24 08:59:49 +0000 UTC" firstStartedPulling="2025-11-24 08:59:51.14490427 +0000 UTC m=+1463.261195184" lastFinishedPulling="2025-11-24 08:59:53.777131706 +0000 UTC m=+1465.893422610" observedRunningTime="2025-11-24 08:59:54.192389436 +0000 UTC m=+1466.308680340" watchObservedRunningTime="2025-11-24 08:59:54.204985156 +0000 UTC m=+1466.321276060" Nov 24 08:59:55 crc kubenswrapper[4718]: I1124 08:59:55.182340 4718 generic.go:334] "Generic (PLEG): container finished" podID="b0b5661f-b192-4105-8650-855923df4c2c" containerID="a8ab817eef0118a6572c73a55af63e8c5f0330036ff582e364e68eea89068ec6" exitCode=0 Nov 24 08:59:55 crc kubenswrapper[4718]: I1124 08:59:55.182409 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l6mmf" event={"ID":"b0b5661f-b192-4105-8650-855923df4c2c","Type":"ContainerDied","Data":"a8ab817eef0118a6572c73a55af63e8c5f0330036ff582e364e68eea89068ec6"} Nov 24 08:59:56 crc kubenswrapper[4718]: I1124 08:59:56.190995 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l6mmf" event={"ID":"b0b5661f-b192-4105-8650-855923df4c2c","Type":"ContainerStarted","Data":"ad1c403166b34959809eea75991cdd8e520d974b8a378a73c36ed23b148d2ee8"} Nov 24 08:59:58 crc kubenswrapper[4718]: I1124 08:59:58.206007 4718 generic.go:334] "Generic (PLEG): container finished" 
podID="b0b5661f-b192-4105-8650-855923df4c2c" containerID="ad1c403166b34959809eea75991cdd8e520d974b8a378a73c36ed23b148d2ee8" exitCode=0 Nov 24 08:59:58 crc kubenswrapper[4718]: I1124 08:59:58.206117 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l6mmf" event={"ID":"b0b5661f-b192-4105-8650-855923df4c2c","Type":"ContainerDied","Data":"ad1c403166b34959809eea75991cdd8e520d974b8a378a73c36ed23b148d2ee8"} Nov 24 08:59:59 crc kubenswrapper[4718]: I1124 08:59:59.216375 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l6mmf" event={"ID":"b0b5661f-b192-4105-8650-855923df4c2c","Type":"ContainerStarted","Data":"48428d9fc3ad899c25536ff719de5c0b87ce78b147a36f821dcfc5f4bd7a644d"} Nov 24 08:59:59 crc kubenswrapper[4718]: I1124 08:59:59.244949 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-l6mmf" podStartSLOduration=2.48339813 podStartE2EDuration="6.244928487s" podCreationTimestamp="2025-11-24 08:59:53 +0000 UTC" firstStartedPulling="2025-11-24 08:59:55.184316442 +0000 UTC m=+1467.300607346" lastFinishedPulling="2025-11-24 08:59:58.945846799 +0000 UTC m=+1471.062137703" observedRunningTime="2025-11-24 08:59:59.241289097 +0000 UTC m=+1471.357580011" watchObservedRunningTime="2025-11-24 08:59:59.244928487 +0000 UTC m=+1471.361219391" Nov 24 08:59:59 crc kubenswrapper[4718]: I1124 08:59:59.857234 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bcm9p" Nov 24 08:59:59 crc kubenswrapper[4718]: I1124 08:59:59.857275 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bcm9p" Nov 24 08:59:59 crc kubenswrapper[4718]: I1124 08:59:59.900111 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bcm9p" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.152103 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7"] Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.153310 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.156544 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.157808 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.160684 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm"] Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.161787 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.167564 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l"] Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.168783 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.174864 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7"] Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.181884 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm"] Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.188905 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l"] Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.207753 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-cache-glance-default-external-api-0-cleaner-2939958kl49l\" (UID: \"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab\") " pod="glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.207813 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm\" (UID: \"fda669d9-910c-4494-b5bd-29658bc86875\") " pod="glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.207846 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnj6q\" (UniqueName: \"kubernetes.io/projected/d585b8bf-d86a-40bd-b6ea-08fe8a8113ab-kube-api-access-jnj6q\") pod \"glance-cache-glance-default-external-api-0-cleaner-2939958kl49l\" (UID: \"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab\") " pod="glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.207872 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-cache-config-data\" (UniqueName: \"kubernetes.io/secret/d585b8bf-d86a-40bd-b6ea-08fe8a8113ab-image-cache-config-data\") pod \"glance-cache-glance-default-external-api-0-cleaner-2939958kl49l\" (UID: \"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab\") " pod="glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.207898 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qg9qg\" (UniqueName: \"kubernetes.io/projected/fda669d9-910c-4494-b5bd-29658bc86875-kube-api-access-qg9qg\") pod \"glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm\" (UID: \"fda669d9-910c-4494-b5bd-29658bc86875\") " pod="glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.207916 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fb162263-4f59-4327-92e2-f82027a7b1cd-config-volume\") pod \"collect-profiles-29399580-n2wk7\" (UID: \"fb162263-4f59-4327-92e2-f82027a7b1cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7" 
Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.207937 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-cache-config-data\" (UniqueName: \"kubernetes.io/secret/fda669d9-910c-4494-b5bd-29658bc86875-image-cache-config-data\") pod \"glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm\" (UID: \"fda669d9-910c-4494-b5bd-29658bc86875\") " pod="glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.207986 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdqgt\" (UniqueName: \"kubernetes.io/projected/fb162263-4f59-4327-92e2-f82027a7b1cd-kube-api-access-zdqgt\") pod \"collect-profiles-29399580-n2wk7\" (UID: \"fb162263-4f59-4327-92e2-f82027a7b1cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.208025 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fb162263-4f59-4327-92e2-f82027a7b1cd-secret-volume\") pod \"collect-profiles-29399580-n2wk7\" (UID: \"fb162263-4f59-4327-92e2-f82027a7b1cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.250465 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-cache-glance-default-external-api-0-cleaner-2939958kl49l\" (UID: \"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab\") " pod="glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.258839 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm\" (UID: \"fda669d9-910c-4494-b5bd-29658bc86875\") " pod="glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.288383 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bcm9p" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.309215 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdqgt\" (UniqueName: \"kubernetes.io/projected/fb162263-4f59-4327-92e2-f82027a7b1cd-kube-api-access-zdqgt\") pod \"collect-profiles-29399580-n2wk7\" (UID: \"fb162263-4f59-4327-92e2-f82027a7b1cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.309305 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fb162263-4f59-4327-92e2-f82027a7b1cd-secret-volume\") pod \"collect-profiles-29399580-n2wk7\" (UID: \"fb162263-4f59-4327-92e2-f82027a7b1cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.309379 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnj6q\" (UniqueName: 
\"kubernetes.io/projected/d585b8bf-d86a-40bd-b6ea-08fe8a8113ab-kube-api-access-jnj6q\") pod \"glance-cache-glance-default-external-api-0-cleaner-2939958kl49l\" (UID: \"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab\") " pod="glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.309413 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-cache-config-data\" (UniqueName: \"kubernetes.io/secret/d585b8bf-d86a-40bd-b6ea-08fe8a8113ab-image-cache-config-data\") pod \"glance-cache-glance-default-external-api-0-cleaner-2939958kl49l\" (UID: \"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab\") " pod="glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.309450 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fb162263-4f59-4327-92e2-f82027a7b1cd-config-volume\") pod \"collect-profiles-29399580-n2wk7\" (UID: \"fb162263-4f59-4327-92e2-f82027a7b1cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.309470 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qg9qg\" (UniqueName: \"kubernetes.io/projected/fda669d9-910c-4494-b5bd-29658bc86875-kube-api-access-qg9qg\") pod \"glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm\" (UID: \"fda669d9-910c-4494-b5bd-29658bc86875\") " pod="glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.309489 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-cache-config-data\" (UniqueName: \"kubernetes.io/secret/fda669d9-910c-4494-b5bd-29658bc86875-image-cache-config-data\") pod \"glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm\" (UID: \"fda669d9-910c-4494-b5bd-29658bc86875\") " pod="glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.311141 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fb162263-4f59-4327-92e2-f82027a7b1cd-config-volume\") pod \"collect-profiles-29399580-n2wk7\" (UID: \"fb162263-4f59-4327-92e2-f82027a7b1cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.318453 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fb162263-4f59-4327-92e2-f82027a7b1cd-secret-volume\") pod \"collect-profiles-29399580-n2wk7\" (UID: \"fb162263-4f59-4327-92e2-f82027a7b1cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.329109 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-cache-config-data\" (UniqueName: \"kubernetes.io/secret/fda669d9-910c-4494-b5bd-29658bc86875-image-cache-config-data\") pod \"glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm\" (UID: \"fda669d9-910c-4494-b5bd-29658bc86875\") " pod="glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.329694 4718 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-cache-config-data\" (UniqueName: \"kubernetes.io/secret/d585b8bf-d86a-40bd-b6ea-08fe8a8113ab-image-cache-config-data\") pod \"glance-cache-glance-default-external-api-0-cleaner-2939958kl49l\" (UID: \"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab\") " pod="glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.338606 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnj6q\" (UniqueName: \"kubernetes.io/projected/d585b8bf-d86a-40bd-b6ea-08fe8a8113ab-kube-api-access-jnj6q\") pod \"glance-cache-glance-default-external-api-0-cleaner-2939958kl49l\" (UID: \"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab\") " pod="glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.338653 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdqgt\" (UniqueName: \"kubernetes.io/projected/fb162263-4f59-4327-92e2-f82027a7b1cd-kube-api-access-zdqgt\") pod \"collect-profiles-29399580-n2wk7\" (UID: \"fb162263-4f59-4327-92e2-f82027a7b1cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.343497 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qg9qg\" (UniqueName: \"kubernetes.io/projected/fda669d9-910c-4494-b5bd-29658bc86875-kube-api-access-qg9qg\") pod \"glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm\" (UID: \"fda669d9-910c-4494-b5bd-29658bc86875\") " pod="glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.482891 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.498598 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.535576 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l" Nov 24 09:00:00 crc kubenswrapper[4718]: I1124 09:00:00.927745 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7"] Nov 24 09:00:00 crc kubenswrapper[4718]: W1124 09:00:00.934679 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfb162263_4f59_4327_92e2_f82027a7b1cd.slice/crio-c4badda91f2616a52c467c4078597f4892d924112ca40bfdb729cb70dfb02f29 WatchSource:0}: Error finding container c4badda91f2616a52c467c4078597f4892d924112ca40bfdb729cb70dfb02f29: Status 404 returned error can't find the container with id c4badda91f2616a52c467c4078597f4892d924112ca40bfdb729cb70dfb02f29 Nov 24 09:00:01 crc kubenswrapper[4718]: I1124 09:00:01.000606 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l"] Nov 24 09:00:01 crc kubenswrapper[4718]: I1124 09:00:01.012602 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm"] Nov 24 09:00:01 crc kubenswrapper[4718]: W1124 09:00:01.013374 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfda669d9_910c_4494_b5bd_29658bc86875.slice/crio-3119af3d781f51b124c23052cba5e914ca6e9122594d5a53e14e5739d2519a5b WatchSource:0}: Error finding container 3119af3d781f51b124c23052cba5e914ca6e9122594d5a53e14e5739d2519a5b: Status 404 returned error can't find the container with id 3119af3d781f51b124c23052cba5e914ca6e9122594d5a53e14e5739d2519a5b Nov 24 09:00:01 crc kubenswrapper[4718]: W1124 09:00:01.027268 4718 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd585b8bf_d86a_40bd_b6ea_08fe8a8113ab.slice/crio-d03e22526847226dcccc63d84211754153ef94d3641acc23c65aca0d436d1036 WatchSource:0}: Error finding container d03e22526847226dcccc63d84211754153ef94d3641acc23c65aca0d436d1036: Status 404 returned error can't find the container with id d03e22526847226dcccc63d84211754153ef94d3641acc23c65aca0d436d1036 Nov 24 09:00:01 crc kubenswrapper[4718]: I1124 09:00:01.119518 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bcm9p"] Nov 24 09:00:01 crc kubenswrapper[4718]: I1124 09:00:01.256263 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7" event={"ID":"fb162263-4f59-4327-92e2-f82027a7b1cd","Type":"ContainerStarted","Data":"50163af9d8f4d67e87d6150eb8a4311b6257dad5fdc3a403c2b3cfef93a62b4b"} Nov 24 09:00:01 crc kubenswrapper[4718]: I1124 09:00:01.256344 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7" event={"ID":"fb162263-4f59-4327-92e2-f82027a7b1cd","Type":"ContainerStarted","Data":"c4badda91f2616a52c467c4078597f4892d924112ca40bfdb729cb70dfb02f29"} Nov 24 09:00:01 crc kubenswrapper[4718]: I1124 09:00:01.261453 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l" 
event={"ID":"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab","Type":"ContainerStarted","Data":"d03e22526847226dcccc63d84211754153ef94d3641acc23c65aca0d436d1036"} Nov 24 09:00:01 crc kubenswrapper[4718]: I1124 09:00:01.271665 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm" event={"ID":"fda669d9-910c-4494-b5bd-29658bc86875","Type":"ContainerStarted","Data":"3119af3d781f51b124c23052cba5e914ca6e9122594d5a53e14e5739d2519a5b"} Nov 24 09:00:02 crc kubenswrapper[4718]: I1124 09:00:02.283594 4718 generic.go:334] "Generic (PLEG): container finished" podID="fb162263-4f59-4327-92e2-f82027a7b1cd" containerID="50163af9d8f4d67e87d6150eb8a4311b6257dad5fdc3a403c2b3cfef93a62b4b" exitCode=0 Nov 24 09:00:02 crc kubenswrapper[4718]: I1124 09:00:02.283628 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7" event={"ID":"fb162263-4f59-4327-92e2-f82027a7b1cd","Type":"ContainerDied","Data":"50163af9d8f4d67e87d6150eb8a4311b6257dad5fdc3a403c2b3cfef93a62b4b"} Nov 24 09:00:02 crc kubenswrapper[4718]: I1124 09:00:02.286269 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l" event={"ID":"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab","Type":"ContainerStarted","Data":"5dca3025b77b4fd3f4a502b0d6cac46519b0098580be0751dbeda6841af30c04"} Nov 24 09:00:02 crc kubenswrapper[4718]: I1124 09:00:02.287420 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm" event={"ID":"fda669d9-910c-4494-b5bd-29658bc86875","Type":"ContainerStarted","Data":"26e4782044160f2844657679ddd4303637981e5571a8a61cf3c216c71a0740aa"} Nov 24 09:00:02 crc kubenswrapper[4718]: I1124 09:00:02.287623 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bcm9p" podUID="abd92df6-0726-441b-b9cd-2bd43343ba0e" containerName="registry-server" containerID="cri-o://e6f9b33bc76495edff5989b875ba47fa5876cc22fc1c24c3d9df77edd0c88e22" gracePeriod=2 Nov 24 09:00:02 crc kubenswrapper[4718]: I1124 09:00:02.318508 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l" podStartSLOduration=2.318488276 podStartE2EDuration="2.318488276s" podCreationTimestamp="2025-11-24 09:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 09:00:02.310234232 +0000 UTC m=+1474.426525136" watchObservedRunningTime="2025-11-24 09:00:02.318488276 +0000 UTC m=+1474.434779180" Nov 24 09:00:02 crc kubenswrapper[4718]: I1124 09:00:02.332773 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm" podStartSLOduration=2.332754197 podStartE2EDuration="2.332754197s" podCreationTimestamp="2025-11-24 09:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 09:00:02.328031931 +0000 UTC m=+1474.444322835" watchObservedRunningTime="2025-11-24 09:00:02.332754197 +0000 UTC m=+1474.449045101" Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.297932 4718 generic.go:334] "Generic (PLEG): container finished" 
podID="d585b8bf-d86a-40bd-b6ea-08fe8a8113ab" containerID="5dca3025b77b4fd3f4a502b0d6cac46519b0098580be0751dbeda6841af30c04" exitCode=0 Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.298024 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l" event={"ID":"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab","Type":"ContainerDied","Data":"5dca3025b77b4fd3f4a502b0d6cac46519b0098580be0751dbeda6841af30c04"} Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.299838 4718 generic.go:334] "Generic (PLEG): container finished" podID="fda669d9-910c-4494-b5bd-29658bc86875" containerID="26e4782044160f2844657679ddd4303637981e5571a8a61cf3c216c71a0740aa" exitCode=0 Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.299888 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm" event={"ID":"fda669d9-910c-4494-b5bd-29658bc86875","Type":"ContainerDied","Data":"26e4782044160f2844657679ddd4303637981e5571a8a61cf3c216c71a0740aa"} Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.302109 4718 generic.go:334] "Generic (PLEG): container finished" podID="abd92df6-0726-441b-b9cd-2bd43343ba0e" containerID="e6f9b33bc76495edff5989b875ba47fa5876cc22fc1c24c3d9df77edd0c88e22" exitCode=0 Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.302189 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcm9p" event={"ID":"abd92df6-0726-441b-b9cd-2bd43343ba0e","Type":"ContainerDied","Data":"e6f9b33bc76495edff5989b875ba47fa5876cc22fc1c24c3d9df77edd0c88e22"} Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.612782 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7" Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.616310 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bcm9p" Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.637679 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-l6mmf" Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.637743 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-l6mmf" Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.681247 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-l6mmf" Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.773746 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abd92df6-0726-441b-b9cd-2bd43343ba0e-utilities\") pod \"abd92df6-0726-441b-b9cd-2bd43343ba0e\" (UID: \"abd92df6-0726-441b-b9cd-2bd43343ba0e\") " Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.773810 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzpws\" (UniqueName: \"kubernetes.io/projected/abd92df6-0726-441b-b9cd-2bd43343ba0e-kube-api-access-dzpws\") pod \"abd92df6-0726-441b-b9cd-2bd43343ba0e\" (UID: \"abd92df6-0726-441b-b9cd-2bd43343ba0e\") " Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.773865 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fb162263-4f59-4327-92e2-f82027a7b1cd-config-volume\") pod \"fb162263-4f59-4327-92e2-f82027a7b1cd\" (UID: \"fb162263-4f59-4327-92e2-f82027a7b1cd\") " Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.773900 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fb162263-4f59-4327-92e2-f82027a7b1cd-secret-volume\") pod \"fb162263-4f59-4327-92e2-f82027a7b1cd\" (UID: \"fb162263-4f59-4327-92e2-f82027a7b1cd\") " Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.773932 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdqgt\" (UniqueName: \"kubernetes.io/projected/fb162263-4f59-4327-92e2-f82027a7b1cd-kube-api-access-zdqgt\") pod \"fb162263-4f59-4327-92e2-f82027a7b1cd\" (UID: \"fb162263-4f59-4327-92e2-f82027a7b1cd\") " Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.774007 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abd92df6-0726-441b-b9cd-2bd43343ba0e-catalog-content\") pod \"abd92df6-0726-441b-b9cd-2bd43343ba0e\" (UID: \"abd92df6-0726-441b-b9cd-2bd43343ba0e\") " Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.775228 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb162263-4f59-4327-92e2-f82027a7b1cd-config-volume" (OuterVolumeSpecName: "config-volume") pod "fb162263-4f59-4327-92e2-f82027a7b1cd" (UID: "fb162263-4f59-4327-92e2-f82027a7b1cd"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.775691 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abd92df6-0726-441b-b9cd-2bd43343ba0e-utilities" (OuterVolumeSpecName: "utilities") pod "abd92df6-0726-441b-b9cd-2bd43343ba0e" (UID: "abd92df6-0726-441b-b9cd-2bd43343ba0e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.785308 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb162263-4f59-4327-92e2-f82027a7b1cd-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fb162263-4f59-4327-92e2-f82027a7b1cd" (UID: "fb162263-4f59-4327-92e2-f82027a7b1cd"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.785353 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb162263-4f59-4327-92e2-f82027a7b1cd-kube-api-access-zdqgt" (OuterVolumeSpecName: "kube-api-access-zdqgt") pod "fb162263-4f59-4327-92e2-f82027a7b1cd" (UID: "fb162263-4f59-4327-92e2-f82027a7b1cd"). InnerVolumeSpecName "kube-api-access-zdqgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.788869 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abd92df6-0726-441b-b9cd-2bd43343ba0e-kube-api-access-dzpws" (OuterVolumeSpecName: "kube-api-access-dzpws") pod "abd92df6-0726-441b-b9cd-2bd43343ba0e" (UID: "abd92df6-0726-441b-b9cd-2bd43343ba0e"). InnerVolumeSpecName "kube-api-access-dzpws". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.870457 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abd92df6-0726-441b-b9cd-2bd43343ba0e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "abd92df6-0726-441b-b9cd-2bd43343ba0e" (UID: "abd92df6-0726-441b-b9cd-2bd43343ba0e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.876067 4718 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fb162263-4f59-4327-92e2-f82027a7b1cd-config-volume\") on node \"crc\" DevicePath \"\"" Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.876112 4718 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fb162263-4f59-4327-92e2-f82027a7b1cd-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.876125 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdqgt\" (UniqueName: \"kubernetes.io/projected/fb162263-4f59-4327-92e2-f82027a7b1cd-kube-api-access-zdqgt\") on node \"crc\" DevicePath \"\"" Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.876134 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abd92df6-0726-441b-b9cd-2bd43343ba0e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.876142 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abd92df6-0726-441b-b9cd-2bd43343ba0e-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 09:00:03 crc kubenswrapper[4718]: I1124 09:00:03.876150 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzpws\" (UniqueName: \"kubernetes.io/projected/abd92df6-0726-441b-b9cd-2bd43343ba0e-kube-api-access-dzpws\") on node \"crc\" DevicePath \"\"" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.312405 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.313083 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399580-n2wk7" event={"ID":"fb162263-4f59-4327-92e2-f82027a7b1cd","Type":"ContainerDied","Data":"c4badda91f2616a52c467c4078597f4892d924112ca40bfdb729cb70dfb02f29"} Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.313150 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c4badda91f2616a52c467c4078597f4892d924112ca40bfdb729cb70dfb02f29" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.316029 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bcm9p" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.316518 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bcm9p" event={"ID":"abd92df6-0726-441b-b9cd-2bd43343ba0e","Type":"ContainerDied","Data":"ba6adb19b6a42fb18637fcd0a14d5a81dfc0d37de830cab4e5a4c2e62e52f4a1"} Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.316620 4718 scope.go:117] "RemoveContainer" containerID="e6f9b33bc76495edff5989b875ba47fa5876cc22fc1c24c3d9df77edd0c88e22" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.343566 4718 scope.go:117] "RemoveContainer" containerID="e06063edfdcdbdec8be5642064198ae35158cd9014ce01afd431dda091c74104" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.359029 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bcm9p"] Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.366448 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bcm9p"] Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.423308 4718 scope.go:117] "RemoveContainer" containerID="e79cc6af8be8fafe897ed1afeefabe9001e35d473b5752d9530c08743b4b749d" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.434243 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-l6mmf" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.605486 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abd92df6-0726-441b-b9cd-2bd43343ba0e" path="/var/lib/kubelet/pods/abd92df6-0726-441b-b9cd-2bd43343ba0e/volumes" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.626024 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.634254 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.690113 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-cache-config-data\" (UniqueName: \"kubernetes.io/secret/d585b8bf-d86a-40bd-b6ea-08fe8a8113ab-image-cache-config-data\") pod \"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab\" (UID: \"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab\") " Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.690216 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab\" (UID: \"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab\") " Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.690323 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jnj6q\" (UniqueName: \"kubernetes.io/projected/d585b8bf-d86a-40bd-b6ea-08fe8a8113ab-kube-api-access-jnj6q\") pod \"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab\" (UID: \"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab\") " Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.694851 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d585b8bf-d86a-40bd-b6ea-08fe8a8113ab-image-cache-config-data" (OuterVolumeSpecName: "image-cache-config-data") pod "d585b8bf-d86a-40bd-b6ea-08fe8a8113ab" (UID: "d585b8bf-d86a-40bd-b6ea-08fe8a8113ab"). InnerVolumeSpecName "image-cache-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.695173 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance-cache") pod "d585b8bf-d86a-40bd-b6ea-08fe8a8113ab" (UID: "d585b8bf-d86a-40bd-b6ea-08fe8a8113ab"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.695446 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d585b8bf-d86a-40bd-b6ea-08fe8a8113ab-kube-api-access-jnj6q" (OuterVolumeSpecName: "kube-api-access-jnj6q") pod "d585b8bf-d86a-40bd-b6ea-08fe8a8113ab" (UID: "d585b8bf-d86a-40bd-b6ea-08fe8a8113ab"). InnerVolumeSpecName "kube-api-access-jnj6q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.791620 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg9qg\" (UniqueName: \"kubernetes.io/projected/fda669d9-910c-4494-b5bd-29658bc86875-kube-api-access-qg9qg\") pod \"fda669d9-910c-4494-b5bd-29658bc86875\" (UID: \"fda669d9-910c-4494-b5bd-29658bc86875\") " Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.791683 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-cache-config-data\" (UniqueName: \"kubernetes.io/secret/fda669d9-910c-4494-b5bd-29658bc86875-image-cache-config-data\") pod \"fda669d9-910c-4494-b5bd-29658bc86875\" (UID: \"fda669d9-910c-4494-b5bd-29658bc86875\") " Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.791822 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"fda669d9-910c-4494-b5bd-29658bc86875\" (UID: \"fda669d9-910c-4494-b5bd-29658bc86875\") " Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.792183 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jnj6q\" (UniqueName: \"kubernetes.io/projected/d585b8bf-d86a-40bd-b6ea-08fe8a8113ab-kube-api-access-jnj6q\") on node \"crc\" DevicePath \"\"" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.792207 4718 reconciler_common.go:293] "Volume detached for volume \"image-cache-config-data\" (UniqueName: \"kubernetes.io/secret/d585b8bf-d86a-40bd-b6ea-08fe8a8113ab-image-cache-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.795114 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda669d9-910c-4494-b5bd-29658bc86875-image-cache-config-data" (OuterVolumeSpecName: "image-cache-config-data") pod "fda669d9-910c-4494-b5bd-29658bc86875" (UID: "fda669d9-910c-4494-b5bd-29658bc86875"). InnerVolumeSpecName "image-cache-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.795330 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda669d9-910c-4494-b5bd-29658bc86875-kube-api-access-qg9qg" (OuterVolumeSpecName: "kube-api-access-qg9qg") pod "fda669d9-910c-4494-b5bd-29658bc86875" (UID: "fda669d9-910c-4494-b5bd-29658bc86875"). InnerVolumeSpecName "kube-api-access-qg9qg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.795349 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance-cache") pod "fda669d9-910c-4494-b5bd-29658bc86875" (UID: "fda669d9-910c-4494-b5bd-29658bc86875"). InnerVolumeSpecName "local-storage05-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.893851 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg9qg\" (UniqueName: \"kubernetes.io/projected/fda669d9-910c-4494-b5bd-29658bc86875-kube-api-access-qg9qg\") on node \"crc\" DevicePath \"\"" Nov 24 09:00:04 crc kubenswrapper[4718]: I1124 09:00:04.893907 4718 reconciler_common.go:293] "Volume detached for volume \"image-cache-config-data\" (UniqueName: \"kubernetes.io/secret/fda669d9-910c-4494-b5bd-29658bc86875-image-cache-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 09:00:05 crc kubenswrapper[4718]: I1124 09:00:05.325311 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l" event={"ID":"d585b8bf-d86a-40bd-b6ea-08fe8a8113ab","Type":"ContainerDied","Data":"d03e22526847226dcccc63d84211754153ef94d3641acc23c65aca0d436d1036"} Nov 24 09:00:05 crc kubenswrapper[4718]: I1124 09:00:05.325359 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d03e22526847226dcccc63d84211754153ef94d3641acc23c65aca0d436d1036" Nov 24 09:00:05 crc kubenswrapper[4718]: I1124 09:00:05.325365 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-cache-glance-default-external-api-0-cleaner-2939958kl49l" Nov 24 09:00:05 crc kubenswrapper[4718]: I1124 09:00:05.327100 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm" event={"ID":"fda669d9-910c-4494-b5bd-29658bc86875","Type":"ContainerDied","Data":"3119af3d781f51b124c23052cba5e914ca6e9122594d5a53e14e5739d2519a5b"} Nov 24 09:00:05 crc kubenswrapper[4718]: I1124 09:00:05.327160 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3119af3d781f51b124c23052cba5e914ca6e9122594d5a53e14e5739d2519a5b" Nov 24 09:00:05 crc kubenswrapper[4718]: I1124 09:00:05.327163 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm" Nov 24 09:00:05 crc kubenswrapper[4718]: I1124 09:00:05.911360 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l6mmf"] Nov 24 09:00:06 crc kubenswrapper[4718]: I1124 09:00:06.335304 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-l6mmf" podUID="b0b5661f-b192-4105-8650-855923df4c2c" containerName="registry-server" containerID="cri-o://48428d9fc3ad899c25536ff719de5c0b87ce78b147a36f821dcfc5f4bd7a644d" gracePeriod=2 Nov 24 09:00:06 crc kubenswrapper[4718]: I1124 09:00:06.716241 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l6mmf" Nov 24 09:00:06 crc kubenswrapper[4718]: I1124 09:00:06.821763 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9t4b\" (UniqueName: \"kubernetes.io/projected/b0b5661f-b192-4105-8650-855923df4c2c-kube-api-access-v9t4b\") pod \"b0b5661f-b192-4105-8650-855923df4c2c\" (UID: \"b0b5661f-b192-4105-8650-855923df4c2c\") " Nov 24 09:00:06 crc kubenswrapper[4718]: I1124 09:00:06.821879 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0b5661f-b192-4105-8650-855923df4c2c-catalog-content\") pod \"b0b5661f-b192-4105-8650-855923df4c2c\" (UID: \"b0b5661f-b192-4105-8650-855923df4c2c\") " Nov 24 09:00:06 crc kubenswrapper[4718]: I1124 09:00:06.821934 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0b5661f-b192-4105-8650-855923df4c2c-utilities\") pod \"b0b5661f-b192-4105-8650-855923df4c2c\" (UID: \"b0b5661f-b192-4105-8650-855923df4c2c\") " Nov 24 09:00:06 crc kubenswrapper[4718]: I1124 09:00:06.822853 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0b5661f-b192-4105-8650-855923df4c2c-utilities" (OuterVolumeSpecName: "utilities") pod "b0b5661f-b192-4105-8650-855923df4c2c" (UID: "b0b5661f-b192-4105-8650-855923df4c2c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 09:00:06 crc kubenswrapper[4718]: I1124 09:00:06.826456 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0b5661f-b192-4105-8650-855923df4c2c-kube-api-access-v9t4b" (OuterVolumeSpecName: "kube-api-access-v9t4b") pod "b0b5661f-b192-4105-8650-855923df4c2c" (UID: "b0b5661f-b192-4105-8650-855923df4c2c"). InnerVolumeSpecName "kube-api-access-v9t4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 09:00:06 crc kubenswrapper[4718]: I1124 09:00:06.864614 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0b5661f-b192-4105-8650-855923df4c2c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b0b5661f-b192-4105-8650-855923df4c2c" (UID: "b0b5661f-b192-4105-8650-855923df4c2c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 09:00:06 crc kubenswrapper[4718]: I1124 09:00:06.923997 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9t4b\" (UniqueName: \"kubernetes.io/projected/b0b5661f-b192-4105-8650-855923df4c2c-kube-api-access-v9t4b\") on node \"crc\" DevicePath \"\"" Nov 24 09:00:06 crc kubenswrapper[4718]: I1124 09:00:06.924033 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0b5661f-b192-4105-8650-855923df4c2c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 09:00:06 crc kubenswrapper[4718]: I1124 09:00:06.924043 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0b5661f-b192-4105-8650-855923df4c2c-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 09:00:07 crc kubenswrapper[4718]: I1124 09:00:07.345649 4718 generic.go:334] "Generic (PLEG): container finished" podID="b0b5661f-b192-4105-8650-855923df4c2c" containerID="48428d9fc3ad899c25536ff719de5c0b87ce78b147a36f821dcfc5f4bd7a644d" exitCode=0 Nov 24 09:00:07 crc kubenswrapper[4718]: I1124 09:00:07.345700 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l6mmf" event={"ID":"b0b5661f-b192-4105-8650-855923df4c2c","Type":"ContainerDied","Data":"48428d9fc3ad899c25536ff719de5c0b87ce78b147a36f821dcfc5f4bd7a644d"} Nov 24 09:00:07 crc kubenswrapper[4718]: I1124 09:00:07.345728 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l6mmf" event={"ID":"b0b5661f-b192-4105-8650-855923df4c2c","Type":"ContainerDied","Data":"cf21a25082201a67d3a90f162eff5679aa8a1ac0ac2a80a2d6da6555c25ec581"} Nov 24 09:00:07 crc kubenswrapper[4718]: I1124 09:00:07.345747 4718 scope.go:117] "RemoveContainer" containerID="48428d9fc3ad899c25536ff719de5c0b87ce78b147a36f821dcfc5f4bd7a644d" Nov 24 09:00:07 crc kubenswrapper[4718]: I1124 09:00:07.345877 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l6mmf" Nov 24 09:00:07 crc kubenswrapper[4718]: I1124 09:00:07.366266 4718 scope.go:117] "RemoveContainer" containerID="ad1c403166b34959809eea75991cdd8e520d974b8a378a73c36ed23b148d2ee8" Nov 24 09:00:07 crc kubenswrapper[4718]: I1124 09:00:07.384143 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l6mmf"] Nov 24 09:00:07 crc kubenswrapper[4718]: I1124 09:00:07.390116 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-l6mmf"] Nov 24 09:00:07 crc kubenswrapper[4718]: I1124 09:00:07.404372 4718 scope.go:117] "RemoveContainer" containerID="a8ab817eef0118a6572c73a55af63e8c5f0330036ff582e364e68eea89068ec6" Nov 24 09:00:07 crc kubenswrapper[4718]: I1124 09:00:07.429222 4718 scope.go:117] "RemoveContainer" containerID="48428d9fc3ad899c25536ff719de5c0b87ce78b147a36f821dcfc5f4bd7a644d" Nov 24 09:00:07 crc kubenswrapper[4718]: E1124 09:00:07.431267 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48428d9fc3ad899c25536ff719de5c0b87ce78b147a36f821dcfc5f4bd7a644d\": container with ID starting with 48428d9fc3ad899c25536ff719de5c0b87ce78b147a36f821dcfc5f4bd7a644d not found: ID does not exist" containerID="48428d9fc3ad899c25536ff719de5c0b87ce78b147a36f821dcfc5f4bd7a644d" Nov 24 09:00:07 crc kubenswrapper[4718]: I1124 09:00:07.431314 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48428d9fc3ad899c25536ff719de5c0b87ce78b147a36f821dcfc5f4bd7a644d"} err="failed to get container status \"48428d9fc3ad899c25536ff719de5c0b87ce78b147a36f821dcfc5f4bd7a644d\": rpc error: code = NotFound desc = could not find container \"48428d9fc3ad899c25536ff719de5c0b87ce78b147a36f821dcfc5f4bd7a644d\": container with ID starting with 48428d9fc3ad899c25536ff719de5c0b87ce78b147a36f821dcfc5f4bd7a644d not found: ID does not exist" Nov 24 09:00:07 crc kubenswrapper[4718]: I1124 09:00:07.431344 4718 scope.go:117] "RemoveContainer" containerID="ad1c403166b34959809eea75991cdd8e520d974b8a378a73c36ed23b148d2ee8" Nov 24 09:00:07 crc kubenswrapper[4718]: E1124 09:00:07.431794 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad1c403166b34959809eea75991cdd8e520d974b8a378a73c36ed23b148d2ee8\": container with ID starting with ad1c403166b34959809eea75991cdd8e520d974b8a378a73c36ed23b148d2ee8 not found: ID does not exist" containerID="ad1c403166b34959809eea75991cdd8e520d974b8a378a73c36ed23b148d2ee8" Nov 24 09:00:07 crc kubenswrapper[4718]: I1124 09:00:07.431822 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad1c403166b34959809eea75991cdd8e520d974b8a378a73c36ed23b148d2ee8"} err="failed to get container status \"ad1c403166b34959809eea75991cdd8e520d974b8a378a73c36ed23b148d2ee8\": rpc error: code = NotFound desc = could not find container \"ad1c403166b34959809eea75991cdd8e520d974b8a378a73c36ed23b148d2ee8\": container with ID starting with ad1c403166b34959809eea75991cdd8e520d974b8a378a73c36ed23b148d2ee8 not found: ID does not exist" Nov 24 09:00:07 crc kubenswrapper[4718]: I1124 09:00:07.431841 4718 scope.go:117] "RemoveContainer" containerID="a8ab817eef0118a6572c73a55af63e8c5f0330036ff582e364e68eea89068ec6" Nov 24 09:00:07 crc kubenswrapper[4718]: E1124 09:00:07.432305 4718 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a8ab817eef0118a6572c73a55af63e8c5f0330036ff582e364e68eea89068ec6\": container with ID starting with a8ab817eef0118a6572c73a55af63e8c5f0330036ff582e364e68eea89068ec6 not found: ID does not exist" containerID="a8ab817eef0118a6572c73a55af63e8c5f0330036ff582e364e68eea89068ec6" Nov 24 09:00:07 crc kubenswrapper[4718]: I1124 09:00:07.432341 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8ab817eef0118a6572c73a55af63e8c5f0330036ff582e364e68eea89068ec6"} err="failed to get container status \"a8ab817eef0118a6572c73a55af63e8c5f0330036ff582e364e68eea89068ec6\": rpc error: code = NotFound desc = could not find container \"a8ab817eef0118a6572c73a55af63e8c5f0330036ff582e364e68eea89068ec6\": container with ID starting with a8ab817eef0118a6572c73a55af63e8c5f0330036ff582e364e68eea89068ec6 not found: ID does not exist" Nov 24 09:00:08 crc kubenswrapper[4718]: I1124 09:00:08.606135 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0b5661f-b192-4105-8650-855923df4c2c" path="/var/lib/kubelet/pods/b0b5661f-b192-4105-8650-855923df4c2c/volumes" Nov 24 09:00:22 crc kubenswrapper[4718]: I1124 09:00:22.044758 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 09:00:22 crc kubenswrapper[4718]: I1124 09:00:22.045302 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 09:00:29 crc kubenswrapper[4718]: I1124 09:00:29.536207 4718 scope.go:117] "RemoveContainer" containerID="b75a3576e90bc6c8d89e44520da9cd75e99bf98c2b18b0ac9c3b1dbadfc061ba" Nov 24 09:00:29 crc kubenswrapper[4718]: I1124 09:00:29.560659 4718 scope.go:117] "RemoveContainer" containerID="1bfb3ee2b066050dfefc5309b07ae571d8fa0faccf565ad77ce8b390f879324c" Nov 24 09:00:29 crc kubenswrapper[4718]: I1124 09:00:29.587770 4718 scope.go:117] "RemoveContainer" containerID="556cbcc6dcc6c37976558b25b353deea6ebc6ae6f5747684123c6a33da074b0e" Nov 24 09:00:49 crc kubenswrapper[4718]: I1124 09:00:49.042720 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/keystone-db-create-2rvdl"] Nov 24 09:00:49 crc kubenswrapper[4718]: I1124 09:00:49.047868 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/keystone-f088-account-create-update-h2pdn"] Nov 24 09:00:49 crc kubenswrapper[4718]: I1124 09:00:49.053210 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/keystone-f088-account-create-update-h2pdn"] Nov 24 09:00:49 crc kubenswrapper[4718]: I1124 09:00:49.060284 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/keystone-db-create-2rvdl"] Nov 24 09:00:50 crc kubenswrapper[4718]: I1124 09:00:50.610419 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c8913b0-43ac-4afc-896f-8c82f07d2c56" path="/var/lib/kubelet/pods/2c8913b0-43ac-4afc-896f-8c82f07d2c56/volumes" Nov 24 09:00:50 crc kubenswrapper[4718]: I1124 09:00:50.611325 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="db5b4c90-8a71-4000-9666-93c6d8bd99db" path="/var/lib/kubelet/pods/db5b4c90-8a71-4000-9666-93c6d8bd99db/volumes" Nov 24 09:00:52 crc kubenswrapper[4718]: I1124 09:00:52.045195 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 09:00:52 crc kubenswrapper[4718]: I1124 09:00:52.045606 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 09:00:52 crc kubenswrapper[4718]: I1124 09:00:52.045670 4718 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 09:00:52 crc kubenswrapper[4718]: I1124 09:00:52.046582 4718 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f"} pod="openshift-machine-config-operator/machine-config-daemon-575gl" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 09:00:52 crc kubenswrapper[4718]: I1124 09:00:52.046668 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" containerID="cri-o://aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" gracePeriod=600 Nov 24 09:00:52 crc kubenswrapper[4718]: E1124 09:00:52.166552 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:00:52 crc kubenswrapper[4718]: I1124 09:00:52.669452 4718 generic.go:334] "Generic (PLEG): container finished" podID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" exitCode=0 Nov 24 09:00:52 crc kubenswrapper[4718]: I1124 09:00:52.669499 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerDied","Data":"aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f"} Nov 24 09:00:52 crc kubenswrapper[4718]: I1124 09:00:52.669541 4718 scope.go:117] "RemoveContainer" containerID="05b60275224e52d49ea67f65feb883fbf9bd70bf35c0916e6ca86fad0a473f7d" Nov 24 09:00:52 crc kubenswrapper[4718]: I1124 09:00:52.670132 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:00:52 crc kubenswrapper[4718]: E1124 09:00:52.670407 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.141211 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/keystone-cron-29399581-q4mhh"] Nov 24 09:01:00 crc kubenswrapper[4718]: E1124 09:01:00.142888 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb162263-4f59-4327-92e2-f82027a7b1cd" containerName="collect-profiles" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.142919 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb162263-4f59-4327-92e2-f82027a7b1cd" containerName="collect-profiles" Nov 24 09:01:00 crc kubenswrapper[4718]: E1124 09:01:00.142943 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0b5661f-b192-4105-8650-855923df4c2c" containerName="extract-utilities" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.142953 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0b5661f-b192-4105-8650-855923df4c2c" containerName="extract-utilities" Nov 24 09:01:00 crc kubenswrapper[4718]: E1124 09:01:00.142990 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd92df6-0726-441b-b9cd-2bd43343ba0e" containerName="extract-content" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.143003 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd92df6-0726-441b-b9cd-2bd43343ba0e" containerName="extract-content" Nov 24 09:01:00 crc kubenswrapper[4718]: E1124 09:01:00.143020 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd92df6-0726-441b-b9cd-2bd43343ba0e" containerName="extract-utilities" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.143028 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd92df6-0726-441b-b9cd-2bd43343ba0e" containerName="extract-utilities" Nov 24 09:01:00 crc kubenswrapper[4718]: E1124 09:01:00.143038 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd92df6-0726-441b-b9cd-2bd43343ba0e" containerName="registry-server" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.143046 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd92df6-0726-441b-b9cd-2bd43343ba0e" containerName="registry-server" Nov 24 09:01:00 crc kubenswrapper[4718]: E1124 09:01:00.143065 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d585b8bf-d86a-40bd-b6ea-08fe8a8113ab" containerName="glance-cache-glance-default-external-api-0-cleaner" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.143076 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="d585b8bf-d86a-40bd-b6ea-08fe8a8113ab" containerName="glance-cache-glance-default-external-api-0-cleaner" Nov 24 09:01:00 crc kubenswrapper[4718]: E1124 09:01:00.143091 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0b5661f-b192-4105-8650-855923df4c2c" containerName="extract-content" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.143099 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0b5661f-b192-4105-8650-855923df4c2c" containerName="extract-content" Nov 24 09:01:00 crc kubenswrapper[4718]: E1124 09:01:00.143120 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0b5661f-b192-4105-8650-855923df4c2c" containerName="registry-server" Nov 24 09:01:00 
crc kubenswrapper[4718]: I1124 09:01:00.143129 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0b5661f-b192-4105-8650-855923df4c2c" containerName="registry-server" Nov 24 09:01:00 crc kubenswrapper[4718]: E1124 09:01:00.143145 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fda669d9-910c-4494-b5bd-29658bc86875" containerName="glance-cache-glance-default-internal-api-0-cleaner" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.143154 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="fda669d9-910c-4494-b5bd-29658bc86875" containerName="glance-cache-glance-default-internal-api-0-cleaner" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.143374 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0b5661f-b192-4105-8650-855923df4c2c" containerName="registry-server" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.143394 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb162263-4f59-4327-92e2-f82027a7b1cd" containerName="collect-profiles" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.143417 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="fda669d9-910c-4494-b5bd-29658bc86875" containerName="glance-cache-glance-default-internal-api-0-cleaner" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.143433 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="d585b8bf-d86a-40bd-b6ea-08fe8a8113ab" containerName="glance-cache-glance-default-external-api-0-cleaner" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.143448 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd92df6-0726-441b-b9cd-2bd43343ba0e" containerName="registry-server" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.144468 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.148901 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-cron-29399581-q4mhh"] Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.219355 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29chh\" (UniqueName: \"kubernetes.io/projected/0e41c559-7a83-477a-a86f-1cfd626da128-kube-api-access-29chh\") pod \"keystone-cron-29399581-q4mhh\" (UID: \"0e41c559-7a83-477a-a86f-1cfd626da128\") " pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.219471 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0e41c559-7a83-477a-a86f-1cfd626da128-fernet-keys\") pod \"keystone-cron-29399581-q4mhh\" (UID: \"0e41c559-7a83-477a-a86f-1cfd626da128\") " pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.219516 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e41c559-7a83-477a-a86f-1cfd626da128-config-data\") pod \"keystone-cron-29399581-q4mhh\" (UID: \"0e41c559-7a83-477a-a86f-1cfd626da128\") " pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.320410 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0e41c559-7a83-477a-a86f-1cfd626da128-fernet-keys\") pod \"keystone-cron-29399581-q4mhh\" (UID: \"0e41c559-7a83-477a-a86f-1cfd626da128\") " pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.320453 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e41c559-7a83-477a-a86f-1cfd626da128-config-data\") pod \"keystone-cron-29399581-q4mhh\" (UID: \"0e41c559-7a83-477a-a86f-1cfd626da128\") " pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.320511 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29chh\" (UniqueName: \"kubernetes.io/projected/0e41c559-7a83-477a-a86f-1cfd626da128-kube-api-access-29chh\") pod \"keystone-cron-29399581-q4mhh\" (UID: \"0e41c559-7a83-477a-a86f-1cfd626da128\") " pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.329348 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e41c559-7a83-477a-a86f-1cfd626da128-config-data\") pod \"keystone-cron-29399581-q4mhh\" (UID: \"0e41c559-7a83-477a-a86f-1cfd626da128\") " pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.331582 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0e41c559-7a83-477a-a86f-1cfd626da128-fernet-keys\") pod \"keystone-cron-29399581-q4mhh\" (UID: \"0e41c559-7a83-477a-a86f-1cfd626da128\") " pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.337029 4718 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-29chh\" (UniqueName: \"kubernetes.io/projected/0e41c559-7a83-477a-a86f-1cfd626da128-kube-api-access-29chh\") pod \"keystone-cron-29399581-q4mhh\" (UID: \"0e41c559-7a83-477a-a86f-1cfd626da128\") " pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.468257 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" Nov 24 09:01:00 crc kubenswrapper[4718]: I1124 09:01:00.859529 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-cron-29399581-q4mhh"] Nov 24 09:01:01 crc kubenswrapper[4718]: I1124 09:01:01.747270 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" event={"ID":"0e41c559-7a83-477a-a86f-1cfd626da128","Type":"ContainerStarted","Data":"5f253c51f091cab67c56e317097a6daca02bdf6010128bf1225ece3ac7fa90d1"} Nov 24 09:01:01 crc kubenswrapper[4718]: I1124 09:01:01.747778 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" event={"ID":"0e41c559-7a83-477a-a86f-1cfd626da128","Type":"ContainerStarted","Data":"4c374a48c497513b99faa7be4c7b3ac28effda595a7a70c1c9d03843475914f4"} Nov 24 09:01:01 crc kubenswrapper[4718]: I1124 09:01:01.767805 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" podStartSLOduration=1.767791694 podStartE2EDuration="1.767791694s" podCreationTimestamp="2025-11-24 09:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 09:01:01.766700337 +0000 UTC m=+1533.882991241" watchObservedRunningTime="2025-11-24 09:01:01.767791694 +0000 UTC m=+1533.884082598" Nov 24 09:01:03 crc kubenswrapper[4718]: I1124 09:01:03.762752 4718 generic.go:334] "Generic (PLEG): container finished" podID="0e41c559-7a83-477a-a86f-1cfd626da128" containerID="5f253c51f091cab67c56e317097a6daca02bdf6010128bf1225ece3ac7fa90d1" exitCode=0 Nov 24 09:01:03 crc kubenswrapper[4718]: I1124 09:01:03.762793 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" event={"ID":"0e41c559-7a83-477a-a86f-1cfd626da128","Type":"ContainerDied","Data":"5f253c51f091cab67c56e317097a6daca02bdf6010128bf1225ece3ac7fa90d1"} Nov 24 09:01:05 crc kubenswrapper[4718]: I1124 09:01:05.035361 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/keystone-db-sync-nwsdw"] Nov 24 09:01:05 crc kubenswrapper[4718]: I1124 09:01:05.041751 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/keystone-db-sync-nwsdw"] Nov 24 09:01:05 crc kubenswrapper[4718]: I1124 09:01:05.070907 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" Nov 24 09:01:05 crc kubenswrapper[4718]: I1124 09:01:05.203258 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-29chh\" (UniqueName: \"kubernetes.io/projected/0e41c559-7a83-477a-a86f-1cfd626da128-kube-api-access-29chh\") pod \"0e41c559-7a83-477a-a86f-1cfd626da128\" (UID: \"0e41c559-7a83-477a-a86f-1cfd626da128\") " Nov 24 09:01:05 crc kubenswrapper[4718]: I1124 09:01:05.203399 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e41c559-7a83-477a-a86f-1cfd626da128-config-data\") pod \"0e41c559-7a83-477a-a86f-1cfd626da128\" (UID: \"0e41c559-7a83-477a-a86f-1cfd626da128\") " Nov 24 09:01:05 crc kubenswrapper[4718]: I1124 09:01:05.204101 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0e41c559-7a83-477a-a86f-1cfd626da128-fernet-keys\") pod \"0e41c559-7a83-477a-a86f-1cfd626da128\" (UID: \"0e41c559-7a83-477a-a86f-1cfd626da128\") " Nov 24 09:01:05 crc kubenswrapper[4718]: I1124 09:01:05.208500 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e41c559-7a83-477a-a86f-1cfd626da128-kube-api-access-29chh" (OuterVolumeSpecName: "kube-api-access-29chh") pod "0e41c559-7a83-477a-a86f-1cfd626da128" (UID: "0e41c559-7a83-477a-a86f-1cfd626da128"). InnerVolumeSpecName "kube-api-access-29chh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 09:01:05 crc kubenswrapper[4718]: I1124 09:01:05.208582 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e41c559-7a83-477a-a86f-1cfd626da128-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "0e41c559-7a83-477a-a86f-1cfd626da128" (UID: "0e41c559-7a83-477a-a86f-1cfd626da128"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 09:01:05 crc kubenswrapper[4718]: I1124 09:01:05.241705 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e41c559-7a83-477a-a86f-1cfd626da128-config-data" (OuterVolumeSpecName: "config-data") pod "0e41c559-7a83-477a-a86f-1cfd626da128" (UID: "0e41c559-7a83-477a-a86f-1cfd626da128"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 09:01:05 crc kubenswrapper[4718]: I1124 09:01:05.305226 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-29chh\" (UniqueName: \"kubernetes.io/projected/0e41c559-7a83-477a-a86f-1cfd626da128-kube-api-access-29chh\") on node \"crc\" DevicePath \"\"" Nov 24 09:01:05 crc kubenswrapper[4718]: I1124 09:01:05.305262 4718 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e41c559-7a83-477a-a86f-1cfd626da128-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 09:01:05 crc kubenswrapper[4718]: I1124 09:01:05.305272 4718 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0e41c559-7a83-477a-a86f-1cfd626da128-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 24 09:01:05 crc kubenswrapper[4718]: I1124 09:01:05.777394 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" event={"ID":"0e41c559-7a83-477a-a86f-1cfd626da128","Type":"ContainerDied","Data":"4c374a48c497513b99faa7be4c7b3ac28effda595a7a70c1c9d03843475914f4"} Nov 24 09:01:05 crc kubenswrapper[4718]: I1124 09:01:05.777443 4718 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c374a48c497513b99faa7be4c7b3ac28effda595a7a70c1c9d03843475914f4" Nov 24 09:01:05 crc kubenswrapper[4718]: I1124 09:01:05.777504 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-cron-29399581-q4mhh" Nov 24 09:01:06 crc kubenswrapper[4718]: I1124 09:01:06.596504 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:01:06 crc kubenswrapper[4718]: E1124 09:01:06.596866 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:01:06 crc kubenswrapper[4718]: I1124 09:01:06.608622 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8211a8c-2e51-4031-b0ef-4831c0c97924" path="/var/lib/kubelet/pods/a8211a8c-2e51-4031-b0ef-4831c0c97924/volumes" Nov 24 09:01:11 crc kubenswrapper[4718]: I1124 09:01:11.024653 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/keystone-bootstrap-wrd7l"] Nov 24 09:01:11 crc kubenswrapper[4718]: I1124 09:01:11.029796 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/keystone-bootstrap-wrd7l"] Nov 24 09:01:12 crc kubenswrapper[4718]: I1124 09:01:12.604377 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fb11712-1346-4bba-9ee1-eaa809ec3b42" path="/var/lib/kubelet/pods/8fb11712-1346-4bba-9ee1-eaa809ec3b42/volumes" Nov 24 09:01:17 crc kubenswrapper[4718]: I1124 09:01:17.595959 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:01:17 crc kubenswrapper[4718]: E1124 09:01:17.596877 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:01:29 crc kubenswrapper[4718]: I1124 09:01:29.712084 4718 scope.go:117] "RemoveContainer" containerID="10598ea74b1401763667babf28b7f0c3618557d8c8083d034f3ed5e0843fc6ad" Nov 24 09:01:29 crc kubenswrapper[4718]: I1124 09:01:29.753478 4718 scope.go:117] "RemoveContainer" containerID="adf7aa49d3532a360b6fc92ca69aeff2f29ef66d4dfac87f647ddb1875778524" Nov 24 09:01:29 crc kubenswrapper[4718]: I1124 09:01:29.792248 4718 scope.go:117] "RemoveContainer" containerID="ec56abd82bb090b0d5bb352fdf038920ba24a10e6d5644da9239092c43262b2b" Nov 24 09:01:29 crc kubenswrapper[4718]: I1124 09:01:29.809618 4718 scope.go:117] "RemoveContainer" containerID="c86ae82377eca1533d390bf3eee4a32ec3ae8368ae2a2cc492ebfd98063d4095" Nov 24 09:01:29 crc kubenswrapper[4718]: I1124 09:01:29.841439 4718 scope.go:117] "RemoveContainer" containerID="6b0badc2ba3ca3d440201b4d63b16e615de176deb1853928db4f2ada532867e7" Nov 24 09:01:29 crc kubenswrapper[4718]: I1124 09:01:29.866003 4718 scope.go:117] "RemoveContainer" containerID="467e58e87863e217f10ddbcfe1df8b68c537a24a4e2bd68c2535f12942ff733a" Nov 24 09:01:31 crc kubenswrapper[4718]: I1124 09:01:31.596724 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:01:31 crc kubenswrapper[4718]: E1124 09:01:31.597070 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.015790 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/openstackclient"] Nov 24 09:01:36 crc kubenswrapper[4718]: E1124 09:01:36.016528 4718 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e41c559-7a83-477a-a86f-1cfd626da128" containerName="keystone-cron" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.016541 4718 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e41c559-7a83-477a-a86f-1cfd626da128" containerName="keystone-cron" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.016676 4718 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e41c559-7a83-477a-a86f-1cfd626da128" containerName="keystone-cron" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.017127 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/openstackclient" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.019120 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-config" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.019140 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"openstack-config-secret" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.019203 4718 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"default-dockercfg-d4cb2" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.023823 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstackclient"] Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.027147 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-scripts-9db6gc427h" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.168915 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ee8580df-7780-4709-89f4-c1c8b7222187-openstack-config\") pod \"openstackclient\" (UID: \"ee8580df-7780-4709-89f4-c1c8b7222187\") " pod="glance-kuttl-tests/openstackclient" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.169017 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/ee8580df-7780-4709-89f4-c1c8b7222187-openstack-scripts\") pod \"openstackclient\" (UID: \"ee8580df-7780-4709-89f4-c1c8b7222187\") " pod="glance-kuttl-tests/openstackclient" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.169052 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2fcn\" (UniqueName: \"kubernetes.io/projected/ee8580df-7780-4709-89f4-c1c8b7222187-kube-api-access-m2fcn\") pod \"openstackclient\" (UID: \"ee8580df-7780-4709-89f4-c1c8b7222187\") " pod="glance-kuttl-tests/openstackclient" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.169142 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ee8580df-7780-4709-89f4-c1c8b7222187-openstack-config-secret\") pod \"openstackclient\" (UID: \"ee8580df-7780-4709-89f4-c1c8b7222187\") " pod="glance-kuttl-tests/openstackclient" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.271510 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ee8580df-7780-4709-89f4-c1c8b7222187-openstack-config\") pod \"openstackclient\" (UID: \"ee8580df-7780-4709-89f4-c1c8b7222187\") " pod="glance-kuttl-tests/openstackclient" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.271571 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/ee8580df-7780-4709-89f4-c1c8b7222187-openstack-scripts\") pod \"openstackclient\" (UID: \"ee8580df-7780-4709-89f4-c1c8b7222187\") " pod="glance-kuttl-tests/openstackclient" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.271597 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2fcn\" (UniqueName: 
\"kubernetes.io/projected/ee8580df-7780-4709-89f4-c1c8b7222187-kube-api-access-m2fcn\") pod \"openstackclient\" (UID: \"ee8580df-7780-4709-89f4-c1c8b7222187\") " pod="glance-kuttl-tests/openstackclient" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.271635 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ee8580df-7780-4709-89f4-c1c8b7222187-openstack-config-secret\") pod \"openstackclient\" (UID: \"ee8580df-7780-4709-89f4-c1c8b7222187\") " pod="glance-kuttl-tests/openstackclient" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.272505 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ee8580df-7780-4709-89f4-c1c8b7222187-openstack-config\") pod \"openstackclient\" (UID: \"ee8580df-7780-4709-89f4-c1c8b7222187\") " pod="glance-kuttl-tests/openstackclient" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.272583 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/ee8580df-7780-4709-89f4-c1c8b7222187-openstack-scripts\") pod \"openstackclient\" (UID: \"ee8580df-7780-4709-89f4-c1c8b7222187\") " pod="glance-kuttl-tests/openstackclient" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.287572 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ee8580df-7780-4709-89f4-c1c8b7222187-openstack-config-secret\") pod \"openstackclient\" (UID: \"ee8580df-7780-4709-89f4-c1c8b7222187\") " pod="glance-kuttl-tests/openstackclient" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.295766 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2fcn\" (UniqueName: \"kubernetes.io/projected/ee8580df-7780-4709-89f4-c1c8b7222187-kube-api-access-m2fcn\") pod \"openstackclient\" (UID: \"ee8580df-7780-4709-89f4-c1c8b7222187\") " pod="glance-kuttl-tests/openstackclient" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.337949 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/openstackclient" Nov 24 09:01:36 crc kubenswrapper[4718]: I1124 09:01:36.749819 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstackclient"] Nov 24 09:01:37 crc kubenswrapper[4718]: I1124 09:01:37.002917 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstackclient" event={"ID":"ee8580df-7780-4709-89f4-c1c8b7222187","Type":"ContainerStarted","Data":"31e8a9be38b3f21c2c8983cd35096e0dee4cfaa8daa53c7a9bee144cfa8f9a13"} Nov 24 09:01:37 crc kubenswrapper[4718]: I1124 09:01:37.003452 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstackclient" event={"ID":"ee8580df-7780-4709-89f4-c1c8b7222187","Type":"ContainerStarted","Data":"ed2f8f1e7365d0b09e5d02cda667253028b3e7602f71d23642ed24916f24985a"} Nov 24 09:01:37 crc kubenswrapper[4718]: I1124 09:01:37.016403 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/openstackclient" podStartSLOduration=1.016385178 podStartE2EDuration="1.016385178s" podCreationTimestamp="2025-11-24 09:01:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 09:01:37.01603892 +0000 UTC m=+1569.132329854" watchObservedRunningTime="2025-11-24 09:01:37.016385178 +0000 UTC m=+1569.132676082" Nov 24 09:01:45 crc kubenswrapper[4718]: I1124 09:01:45.596633 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:01:45 crc kubenswrapper[4718]: E1124 09:01:45.597391 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:01:57 crc kubenswrapper[4718]: I1124 09:01:57.596661 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:01:57 crc kubenswrapper[4718]: E1124 09:01:57.597337 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:02:11 crc kubenswrapper[4718]: I1124 09:02:11.597197 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:02:11 crc kubenswrapper[4718]: E1124 09:02:11.598076 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:02:23 crc kubenswrapper[4718]: I1124 09:02:23.596877 4718 scope.go:117] "RemoveContainer" 
containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:02:23 crc kubenswrapper[4718]: E1124 09:02:23.597818 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:02:37 crc kubenswrapper[4718]: I1124 09:02:37.596532 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:02:37 crc kubenswrapper[4718]: E1124 09:02:37.598001 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:02:51 crc kubenswrapper[4718]: I1124 09:02:51.596631 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:02:51 crc kubenswrapper[4718]: E1124 09:02:51.597405 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:03:05 crc kubenswrapper[4718]: I1124 09:03:05.597065 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:03:05 crc kubenswrapper[4718]: E1124 09:03:05.598960 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:03:05 crc kubenswrapper[4718]: I1124 09:03:05.887858 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-8mmnm/must-gather-m55fg"] Nov 24 09:03:05 crc kubenswrapper[4718]: I1124 09:03:05.889225 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8mmnm/must-gather-m55fg" Nov 24 09:03:05 crc kubenswrapper[4718]: I1124 09:03:05.891739 4718 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-8mmnm"/"default-dockercfg-9rrpl" Nov 24 09:03:05 crc kubenswrapper[4718]: I1124 09:03:05.892174 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-8mmnm"/"openshift-service-ca.crt" Nov 24 09:03:05 crc kubenswrapper[4718]: I1124 09:03:05.892364 4718 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-8mmnm"/"kube-root-ca.crt" Nov 24 09:03:05 crc kubenswrapper[4718]: I1124 09:03:05.912861 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-8mmnm/must-gather-m55fg"] Nov 24 09:03:06 crc kubenswrapper[4718]: I1124 09:03:06.023567 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p62tm\" (UniqueName: \"kubernetes.io/projected/caa366f9-42ff-44f5-abab-e68bbef875d0-kube-api-access-p62tm\") pod \"must-gather-m55fg\" (UID: \"caa366f9-42ff-44f5-abab-e68bbef875d0\") " pod="openshift-must-gather-8mmnm/must-gather-m55fg" Nov 24 09:03:06 crc kubenswrapper[4718]: I1124 09:03:06.023636 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/caa366f9-42ff-44f5-abab-e68bbef875d0-must-gather-output\") pod \"must-gather-m55fg\" (UID: \"caa366f9-42ff-44f5-abab-e68bbef875d0\") " pod="openshift-must-gather-8mmnm/must-gather-m55fg" Nov 24 09:03:06 crc kubenswrapper[4718]: I1124 09:03:06.125147 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p62tm\" (UniqueName: \"kubernetes.io/projected/caa366f9-42ff-44f5-abab-e68bbef875d0-kube-api-access-p62tm\") pod \"must-gather-m55fg\" (UID: \"caa366f9-42ff-44f5-abab-e68bbef875d0\") " pod="openshift-must-gather-8mmnm/must-gather-m55fg" Nov 24 09:03:06 crc kubenswrapper[4718]: I1124 09:03:06.125195 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/caa366f9-42ff-44f5-abab-e68bbef875d0-must-gather-output\") pod \"must-gather-m55fg\" (UID: \"caa366f9-42ff-44f5-abab-e68bbef875d0\") " pod="openshift-must-gather-8mmnm/must-gather-m55fg" Nov 24 09:03:06 crc kubenswrapper[4718]: I1124 09:03:06.125633 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/caa366f9-42ff-44f5-abab-e68bbef875d0-must-gather-output\") pod \"must-gather-m55fg\" (UID: \"caa366f9-42ff-44f5-abab-e68bbef875d0\") " pod="openshift-must-gather-8mmnm/must-gather-m55fg" Nov 24 09:03:06 crc kubenswrapper[4718]: I1124 09:03:06.153157 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p62tm\" (UniqueName: \"kubernetes.io/projected/caa366f9-42ff-44f5-abab-e68bbef875d0-kube-api-access-p62tm\") pod \"must-gather-m55fg\" (UID: \"caa366f9-42ff-44f5-abab-e68bbef875d0\") " pod="openshift-must-gather-8mmnm/must-gather-m55fg" Nov 24 09:03:06 crc kubenswrapper[4718]: I1124 09:03:06.256248 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8mmnm/must-gather-m55fg" Nov 24 09:03:06 crc kubenswrapper[4718]: I1124 09:03:06.691223 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-8mmnm/must-gather-m55fg"] Nov 24 09:03:07 crc kubenswrapper[4718]: I1124 09:03:07.250770 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8mmnm/must-gather-m55fg" event={"ID":"caa366f9-42ff-44f5-abab-e68bbef875d0","Type":"ContainerStarted","Data":"ec436647ccc44cbbe0afaae07b0ea809a7b3095b33fd78a75bb003a81455e42c"} Nov 24 09:03:13 crc kubenswrapper[4718]: I1124 09:03:13.301867 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8mmnm/must-gather-m55fg" event={"ID":"caa366f9-42ff-44f5-abab-e68bbef875d0","Type":"ContainerStarted","Data":"1f2e78b27358656a8ed32e7d3213bf2eaefa381ee6a85496aa8b2be57269dfd3"} Nov 24 09:03:13 crc kubenswrapper[4718]: I1124 09:03:13.302439 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8mmnm/must-gather-m55fg" event={"ID":"caa366f9-42ff-44f5-abab-e68bbef875d0","Type":"ContainerStarted","Data":"2178def47b5245cf8b3869a6187af70b956c5780892d67c4ec9fc60a3317bd17"} Nov 24 09:03:13 crc kubenswrapper[4718]: I1124 09:03:13.319623 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-8mmnm/must-gather-m55fg" podStartSLOduration=2.342890505 podStartE2EDuration="8.319605584s" podCreationTimestamp="2025-11-24 09:03:05 +0000 UTC" firstStartedPulling="2025-11-24 09:03:06.697821953 +0000 UTC m=+1658.814112847" lastFinishedPulling="2025-11-24 09:03:12.674537022 +0000 UTC m=+1664.790827926" observedRunningTime="2025-11-24 09:03:13.314887488 +0000 UTC m=+1665.431178392" watchObservedRunningTime="2025-11-24 09:03:13.319605584 +0000 UTC m=+1665.435896488" Nov 24 09:03:19 crc kubenswrapper[4718]: I1124 09:03:19.595828 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:03:19 crc kubenswrapper[4718]: E1124 09:03:19.596576 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:03:31 crc kubenswrapper[4718]: I1124 09:03:31.596862 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:03:31 crc kubenswrapper[4718]: E1124 09:03:31.597821 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:03:42 crc kubenswrapper[4718]: I1124 09:03:42.596828 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:03:42 crc kubenswrapper[4718]: E1124 09:03:42.597689 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:03:44 crc kubenswrapper[4718]: I1124 09:03:44.979935 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr_369e75e0-4d80-470a-8524-785ff06f217e/util/0.log" Nov 24 09:03:45 crc kubenswrapper[4718]: I1124 09:03:45.134476 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr_369e75e0-4d80-470a-8524-785ff06f217e/util/0.log" Nov 24 09:03:45 crc kubenswrapper[4718]: I1124 09:03:45.144894 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr_369e75e0-4d80-470a-8524-785ff06f217e/pull/0.log" Nov 24 09:03:45 crc kubenswrapper[4718]: I1124 09:03:45.153133 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr_369e75e0-4d80-470a-8524-785ff06f217e/pull/0.log" Nov 24 09:03:45 crc kubenswrapper[4718]: I1124 09:03:45.348478 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr_369e75e0-4d80-470a-8524-785ff06f217e/pull/0.log" Nov 24 09:03:45 crc kubenswrapper[4718]: I1124 09:03:45.353093 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr_369e75e0-4d80-470a-8524-785ff06f217e/util/0.log" Nov 24 09:03:45 crc kubenswrapper[4718]: I1124 09:03:45.384107 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_40d7728e5bcbb667661f7fea54db0fc26a6bdadd6ec52b5ae07147778akh6nr_369e75e0-4d80-470a-8524-785ff06f217e/extract/0.log" Nov 24 09:03:45 crc kubenswrapper[4718]: I1124 09:03:45.516010 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc_8bde562b-ba15-48c3-a959-bb7a2efb1ad1/util/0.log" Nov 24 09:03:45 crc kubenswrapper[4718]: I1124 09:03:45.676222 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc_8bde562b-ba15-48c3-a959-bb7a2efb1ad1/pull/0.log" Nov 24 09:03:45 crc kubenswrapper[4718]: I1124 09:03:45.714168 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc_8bde562b-ba15-48c3-a959-bb7a2efb1ad1/pull/0.log" Nov 24 09:03:45 crc kubenswrapper[4718]: I1124 09:03:45.720584 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc_8bde562b-ba15-48c3-a959-bb7a2efb1ad1/util/0.log" Nov 24 09:03:45 crc kubenswrapper[4718]: I1124 09:03:45.883447 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc_8bde562b-ba15-48c3-a959-bb7a2efb1ad1/util/0.log" Nov 24 09:03:45 crc kubenswrapper[4718]: I1124 09:03:45.911243 4718 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc_8bde562b-ba15-48c3-a959-bb7a2efb1ad1/pull/0.log" Nov 24 09:03:45 crc kubenswrapper[4718]: I1124 09:03:45.949239 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_440bfe09fdefc314a717f31a57d7ebfa5bfc58c8ed9bd7a4e0a57fadb4gzlzc_8bde562b-ba15-48c3-a959-bb7a2efb1ad1/extract/0.log" Nov 24 09:03:46 crc kubenswrapper[4718]: I1124 09:03:46.070221 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb_5241e0fd-f18b-4c8d-aa12-e5d61a29082e/util/0.log" Nov 24 09:03:46 crc kubenswrapper[4718]: I1124 09:03:46.199484 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb_5241e0fd-f18b-4c8d-aa12-e5d61a29082e/util/0.log" Nov 24 09:03:46 crc kubenswrapper[4718]: I1124 09:03:46.211706 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb_5241e0fd-f18b-4c8d-aa12-e5d61a29082e/pull/0.log" Nov 24 09:03:46 crc kubenswrapper[4718]: I1124 09:03:46.211783 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb_5241e0fd-f18b-4c8d-aa12-e5d61a29082e/pull/0.log" Nov 24 09:03:46 crc kubenswrapper[4718]: I1124 09:03:46.389484 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb_5241e0fd-f18b-4c8d-aa12-e5d61a29082e/util/0.log" Nov 24 09:03:46 crc kubenswrapper[4718]: I1124 09:03:46.412664 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb_5241e0fd-f18b-4c8d-aa12-e5d61a29082e/pull/0.log" Nov 24 09:03:46 crc kubenswrapper[4718]: I1124 09:03:46.493500 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_62924b48d7c39bd6b89ba2946b1b573a02aaa01e9968a59c70da1ba1d69gwjb_5241e0fd-f18b-4c8d-aa12-e5d61a29082e/extract/0.log" Nov 24 09:03:46 crc kubenswrapper[4718]: I1124 09:03:46.687176 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx_db9da525-6b14-44ba-8895-8b862aa9f66a/util/0.log" Nov 24 09:03:46 crc kubenswrapper[4718]: I1124 09:03:46.856538 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx_db9da525-6b14-44ba-8895-8b862aa9f66a/util/0.log" Nov 24 09:03:46 crc kubenswrapper[4718]: I1124 09:03:46.881432 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx_db9da525-6b14-44ba-8895-8b862aa9f66a/pull/0.log" Nov 24 09:03:46 crc kubenswrapper[4718]: I1124 09:03:46.911012 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx_db9da525-6b14-44ba-8895-8b862aa9f66a/pull/0.log" Nov 24 09:03:47 crc kubenswrapper[4718]: I1124 09:03:47.060575 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx_db9da525-6b14-44ba-8895-8b862aa9f66a/util/0.log" Nov 24 09:03:47 
crc kubenswrapper[4718]: I1124 09:03:47.073104 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx_db9da525-6b14-44ba-8895-8b862aa9f66a/pull/0.log" Nov 24 09:03:47 crc kubenswrapper[4718]: I1124 09:03:47.086828 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27cb6tdx_db9da525-6b14-44ba-8895-8b862aa9f66a/extract/0.log" Nov 24 09:03:47 crc kubenswrapper[4718]: I1124 09:03:47.238180 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk_595c017b-b3a6-41ef-aaba-4aa42c28da88/util/0.log" Nov 24 09:03:47 crc kubenswrapper[4718]: I1124 09:03:47.397366 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk_595c017b-b3a6-41ef-aaba-4aa42c28da88/pull/0.log" Nov 24 09:03:47 crc kubenswrapper[4718]: I1124 09:03:47.404811 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk_595c017b-b3a6-41ef-aaba-4aa42c28da88/pull/0.log" Nov 24 09:03:47 crc kubenswrapper[4718]: I1124 09:03:47.416169 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk_595c017b-b3a6-41ef-aaba-4aa42c28da88/util/0.log" Nov 24 09:03:47 crc kubenswrapper[4718]: I1124 09:03:47.615532 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk_595c017b-b3a6-41ef-aaba-4aa42c28da88/pull/0.log" Nov 24 09:03:47 crc kubenswrapper[4718]: I1124 09:03:47.616659 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk_595c017b-b3a6-41ef-aaba-4aa42c28da88/util/0.log" Nov 24 09:03:47 crc kubenswrapper[4718]: I1124 09:03:47.632504 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xn7zk_595c017b-b3a6-41ef-aaba-4aa42c28da88/extract/0.log" Nov 24 09:03:47 crc kubenswrapper[4718]: I1124 09:03:47.778011 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb_4c50fe6a-f5ce-4cc1-8044-a378ec1b740b/util/0.log" Nov 24 09:03:47 crc kubenswrapper[4718]: I1124 09:03:47.927632 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb_4c50fe6a-f5ce-4cc1-8044-a378ec1b740b/util/0.log" Nov 24 09:03:47 crc kubenswrapper[4718]: I1124 09:03:47.934346 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb_4c50fe6a-f5ce-4cc1-8044-a378ec1b740b/pull/0.log" Nov 24 09:03:47 crc kubenswrapper[4718]: I1124 09:03:47.934788 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb_4c50fe6a-f5ce-4cc1-8044-a378ec1b740b/pull/0.log" Nov 24 09:03:48 crc kubenswrapper[4718]: I1124 09:03:48.098526 4718 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb_4c50fe6a-f5ce-4cc1-8044-a378ec1b740b/pull/0.log" Nov 24 09:03:48 crc kubenswrapper[4718]: I1124 09:03:48.102982 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb_4c50fe6a-f5ce-4cc1-8044-a378ec1b740b/extract/0.log" Nov 24 09:03:48 crc kubenswrapper[4718]: I1124 09:03:48.143234 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enmflb_4c50fe6a-f5ce-4cc1-8044-a378ec1b740b/util/0.log" Nov 24 09:03:48 crc kubenswrapper[4718]: I1124 09:03:48.186465 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82_e641cf59-bf41-4b81-a0ac-75e8a61baf37/util/0.log" Nov 24 09:03:48 crc kubenswrapper[4718]: I1124 09:03:48.286957 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82_e641cf59-bf41-4b81-a0ac-75e8a61baf37/pull/0.log" Nov 24 09:03:48 crc kubenswrapper[4718]: I1124 09:03:48.298394 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82_e641cf59-bf41-4b81-a0ac-75e8a61baf37/util/0.log" Nov 24 09:03:48 crc kubenswrapper[4718]: I1124 09:03:48.360819 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82_e641cf59-bf41-4b81-a0ac-75e8a61baf37/pull/0.log" Nov 24 09:03:48 crc kubenswrapper[4718]: I1124 09:03:48.495018 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82_e641cf59-bf41-4b81-a0ac-75e8a61baf37/util/0.log" Nov 24 09:03:48 crc kubenswrapper[4718]: I1124 09:03:48.497391 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82_e641cf59-bf41-4b81-a0ac-75e8a61baf37/pull/0.log" Nov 24 09:03:48 crc kubenswrapper[4718]: I1124 09:03:48.503332 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcc2r82_e641cf59-bf41-4b81-a0ac-75e8a61baf37/extract/0.log" Nov 24 09:03:48 crc kubenswrapper[4718]: I1124 09:03:48.535879 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7b4d7bbb6c-pllns_632a00ec-d777-43a0-ac83-f3543055e722/kube-rbac-proxy/0.log" Nov 24 09:03:48 crc kubenswrapper[4718]: I1124 09:03:48.688807 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-index-lmfm8_d2bb363b-e3a7-4a05-a44a-62bbf3b7c879/registry-server/0.log" Nov 24 09:03:48 crc kubenswrapper[4718]: I1124 09:03:48.715827 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-648556d4d5-vv9mf_5114618f-d8e8-4006-8e3b-4c13e4aa9748/kube-rbac-proxy/0.log" Nov 24 09:03:48 crc kubenswrapper[4718]: I1124 09:03:48.758287 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7b4d7bbb6c-pllns_632a00ec-d777-43a0-ac83-f3543055e722/manager/0.log" Nov 24 09:03:48 crc kubenswrapper[4718]: I1124 
09:03:48.859547 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-648556d4d5-vv9mf_5114618f-d8e8-4006-8e3b-4c13e4aa9748/manager/0.log" Nov 24 09:03:48 crc kubenswrapper[4718]: I1124 09:03:48.911393 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-index-mqctp_dbc3b982-3c01-4ad2-bb17-3be63e5fd3d3/registry-server/0.log" Nov 24 09:03:48 crc kubenswrapper[4718]: I1124 09:03:48.972219 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5f64d8d556-c775h_84f2adaf-ef3d-45a9-b471-51b99a01773b/kube-rbac-proxy/0.log" Nov 24 09:03:49 crc kubenswrapper[4718]: I1124 09:03:49.070869 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5f64d8d556-c775h_84f2adaf-ef3d-45a9-b471-51b99a01773b/manager/0.log" Nov 24 09:03:49 crc kubenswrapper[4718]: I1124 09:03:49.108549 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-index-snq4h_420604f4-8485-4afa-a167-ca61c1c63981/registry-server/0.log" Nov 24 09:03:49 crc kubenswrapper[4718]: I1124 09:03:49.167826 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-dfbd56c94-wc9zd_50aceec1-fd1a-4728-abad-a3ed31345a27/kube-rbac-proxy/0.log" Nov 24 09:03:49 crc kubenswrapper[4718]: I1124 09:03:49.272046 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-dfbd56c94-wc9zd_50aceec1-fd1a-4728-abad-a3ed31345a27/manager/0.log" Nov 24 09:03:49 crc kubenswrapper[4718]: I1124 09:03:49.299103 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-index-ssr4h_435e52bb-774d-4264-a126-a362323f96d9/registry-server/0.log" Nov 24 09:03:49 crc kubenswrapper[4718]: I1124 09:03:49.407026 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-65bfd68697-jw2bq_2da1235c-b60e-4440-88bb-6600e6b61308/kube-rbac-proxy/0.log" Nov 24 09:03:49 crc kubenswrapper[4718]: I1124 09:03:49.471099 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-65bfd68697-jw2bq_2da1235c-b60e-4440-88bb-6600e6b61308/manager/0.log" Nov 24 09:03:49 crc kubenswrapper[4718]: I1124 09:03:49.524483 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-index-69p5f_98219050-8eff-435a-9511-d33ce4e58619/registry-server/0.log" Nov 24 09:03:49 crc kubenswrapper[4718]: I1124 09:03:49.626565 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-779fc9694b-zndb4_b7b86409-7678-4f44-b87a-09837009fe67/operator/0.log" Nov 24 09:03:49 crc kubenswrapper[4718]: I1124 09:03:49.671805 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-index-vv2ld_f6f10607-65c4-4502-8aac-b9f26461a142/registry-server/0.log" Nov 24 09:03:49 crc kubenswrapper[4718]: I1124 09:03:49.804371 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-c4c6f6d75-kgf55_9e3d8607-7623-4fa8-bc89-d39fb3e438a1/kube-rbac-proxy/0.log" Nov 24 09:03:49 crc kubenswrapper[4718]: I1124 09:03:49.829672 4718 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_swift-operator-controller-manager-c4c6f6d75-kgf55_9e3d8607-7623-4fa8-bc89-d39fb3e438a1/manager/0.log" Nov 24 09:03:49 crc kubenswrapper[4718]: I1124 09:03:49.906288 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-index-7vr99_7deab911-994f-4484-8961-fd426a52aa55/registry-server/0.log" Nov 24 09:03:54 crc kubenswrapper[4718]: I1124 09:03:54.596450 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:03:54 crc kubenswrapper[4718]: E1124 09:03:54.598440 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:04:03 crc kubenswrapper[4718]: I1124 09:04:03.221402 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-kzq5s_f1f4765d-f9d8-4590-99b4-e1e0823424cd/control-plane-machine-set-operator/0.log" Nov 24 09:04:03 crc kubenswrapper[4718]: I1124 09:04:03.369401 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7hkr4_4b5d227e-1a6b-466b-b380-1e5f7d407e0f/kube-rbac-proxy/0.log" Nov 24 09:04:03 crc kubenswrapper[4718]: I1124 09:04:03.418461 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7hkr4_4b5d227e-1a6b-466b-b380-1e5f7d407e0f/machine-api-operator/0.log" Nov 24 09:04:09 crc kubenswrapper[4718]: I1124 09:04:09.596674 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:04:09 crc kubenswrapper[4718]: E1124 09:04:09.597399 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:04:17 crc kubenswrapper[4718]: I1124 09:04:17.947541 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-c4qp8_a7222fa5-30a4-4387-9417-6e38f5f2f651/kube-rbac-proxy/0.log" Nov 24 09:04:18 crc kubenswrapper[4718]: I1124 09:04:18.042945 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-c4qp8_a7222fa5-30a4-4387-9417-6e38f5f2f651/controller/0.log" Nov 24 09:04:18 crc kubenswrapper[4718]: I1124 09:04:18.158375 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/cp-frr-files/0.log" Nov 24 09:04:18 crc kubenswrapper[4718]: I1124 09:04:18.281126 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/cp-frr-files/0.log" Nov 24 09:04:18 crc kubenswrapper[4718]: I1124 09:04:18.281922 4718 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/cp-reloader/0.log" Nov 24 09:04:18 crc kubenswrapper[4718]: I1124 09:04:18.340858 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/cp-metrics/0.log" Nov 24 09:04:18 crc kubenswrapper[4718]: I1124 09:04:18.376565 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/cp-reloader/0.log" Nov 24 09:04:18 crc kubenswrapper[4718]: I1124 09:04:18.495369 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/cp-metrics/0.log" Nov 24 09:04:18 crc kubenswrapper[4718]: I1124 09:04:18.510820 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/cp-reloader/0.log" Nov 24 09:04:18 crc kubenswrapper[4718]: I1124 09:04:18.560644 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/cp-frr-files/0.log" Nov 24 09:04:18 crc kubenswrapper[4718]: I1124 09:04:18.577192 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/cp-metrics/0.log" Nov 24 09:04:18 crc kubenswrapper[4718]: I1124 09:04:18.731377 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/cp-reloader/0.log" Nov 24 09:04:18 crc kubenswrapper[4718]: I1124 09:04:18.751647 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/cp-metrics/0.log" Nov 24 09:04:18 crc kubenswrapper[4718]: I1124 09:04:18.762023 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/controller/0.log" Nov 24 09:04:18 crc kubenswrapper[4718]: I1124 09:04:18.763171 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/cp-frr-files/0.log" Nov 24 09:04:18 crc kubenswrapper[4718]: I1124 09:04:18.939122 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/frr-metrics/0.log" Nov 24 09:04:18 crc kubenswrapper[4718]: I1124 09:04:18.954834 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/kube-rbac-proxy/0.log" Nov 24 09:04:18 crc kubenswrapper[4718]: I1124 09:04:18.964936 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/kube-rbac-proxy-frr/0.log" Nov 24 09:04:19 crc kubenswrapper[4718]: I1124 09:04:19.146404 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/reloader/0.log" Nov 24 09:04:19 crc kubenswrapper[4718]: I1124 09:04:19.333727 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-6998585d5-4j4sx_4fe355ed-c72c-47f5-9d75-9c8a93614d5d/frr-k8s-webhook-server/0.log" Nov 24 09:04:19 crc kubenswrapper[4718]: I1124 09:04:19.342467 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hx65l_e349dcb4-6ed5-4eac-bf4e-cc569cced0bf/frr/0.log" Nov 24 09:04:19 crc 
kubenswrapper[4718]: I1124 09:04:19.490727 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-667b6d8949-jgz4v_95692fb9-b77a-4c2f-8263-c726a880f5d8/manager/0.log" Nov 24 09:04:19 crc kubenswrapper[4718]: I1124 09:04:19.537000 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-d745c75f7-xqzh8_2a38fc4c-62ec-4435-a15b-7b771d914c3e/webhook-server/0.log" Nov 24 09:04:19 crc kubenswrapper[4718]: I1124 09:04:19.692784 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-kvdb9_74c28e57-44c0-4992-ad63-4a291c2fa10f/kube-rbac-proxy/0.log" Nov 24 09:04:19 crc kubenswrapper[4718]: I1124 09:04:19.818857 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-kvdb9_74c28e57-44c0-4992-ad63-4a291c2fa10f/speaker/0.log" Nov 24 09:04:21 crc kubenswrapper[4718]: I1124 09:04:21.596531 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:04:21 crc kubenswrapper[4718]: E1124 09:04:21.596967 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:04:32 crc kubenswrapper[4718]: I1124 09:04:32.413820 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-7b23-account-create-update-s5bmh_798e367e-b020-428b-a39d-36fd0f8a0082/mariadb-account-create-update/0.log" Nov 24 09:04:32 crc kubenswrapper[4718]: I1124 09:04:32.664143 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-cache-glance-default-external-api-0-cleaner-2939958kl49l_d585b8bf-d86a-40bd-b6ea-08fe8a8113ab/glance-cache-glance-default-external-api-0-cleaner/0.log" Nov 24 09:04:32 crc kubenswrapper[4718]: I1124 09:04:32.701362 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-cache-glance-default-internal-api-0-cleaner-2939958nlxpm_fda669d9-910c-4494-b5bd-29658bc86875/glance-cache-glance-default-internal-api-0-cleaner/0.log" Nov 24 09:04:33 crc kubenswrapper[4718]: I1124 09:04:33.052500 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-db-create-b6tpd_a2bd0da2-6ed3-47c4-9984-3c48e00fddad/mariadb-database-create/0.log" Nov 24 09:04:33 crc kubenswrapper[4718]: I1124 09:04:33.146726 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-db-sync-xs54k_94a221c9-1794-4a91-b03a-c42a9bdcfcb9/glance-db-sync/0.log" Nov 24 09:04:33 crc kubenswrapper[4718]: I1124 09:04:33.270614 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-default-external-api-0_9bb2aba4-18b9-410b-bf51-57714efd5c42/glance-httpd/0.log" Nov 24 09:04:33 crc kubenswrapper[4718]: I1124 09:04:33.311008 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-default-external-api-0_9bb2aba4-18b9-410b-bf51-57714efd5c42/glance-api/0.log" Nov 24 09:04:33 crc kubenswrapper[4718]: I1124 09:04:33.370840 4718 log.go:25] "Finished parsing log file" 
path="/var/log/pods/glance-kuttl-tests_glance-default-external-api-0_9bb2aba4-18b9-410b-bf51-57714efd5c42/glance-log/0.log" Nov 24 09:04:33 crc kubenswrapper[4718]: I1124 09:04:33.478184 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-default-internal-api-0_0ae8afba-57a2-4856-abc2-923d7f6c609d/glance-api/0.log" Nov 24 09:04:33 crc kubenswrapper[4718]: I1124 09:04:33.553477 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-default-internal-api-0_0ae8afba-57a2-4856-abc2-923d7f6c609d/glance-httpd/0.log" Nov 24 09:04:33 crc kubenswrapper[4718]: I1124 09:04:33.613775 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-default-internal-api-0_0ae8afba-57a2-4856-abc2-923d7f6c609d/glance-log/0.log" Nov 24 09:04:33 crc kubenswrapper[4718]: I1124 09:04:33.886032 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_keystone-cron-29399581-q4mhh_0e41c559-7a83-477a-a86f-1cfd626da128/keystone-cron/0.log" Nov 24 09:04:33 crc kubenswrapper[4718]: I1124 09:04:33.957765 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_keystone-668c488b5b-8jjjq_369cd9aa-3e04-4677-b0a4-f9e3422f7944/keystone-api/0.log" Nov 24 09:04:34 crc kubenswrapper[4718]: I1124 09:04:34.150750 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-0_e129bbf2-f4d7-42c1-84b3-80338a6fafd4/mysql-bootstrap/0.log" Nov 24 09:04:34 crc kubenswrapper[4718]: I1124 09:04:34.358452 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-0_e129bbf2-f4d7-42c1-84b3-80338a6fafd4/mysql-bootstrap/0.log" Nov 24 09:04:34 crc kubenswrapper[4718]: I1124 09:04:34.420787 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-0_e129bbf2-f4d7-42c1-84b3-80338a6fafd4/galera/0.log" Nov 24 09:04:34 crc kubenswrapper[4718]: I1124 09:04:34.596043 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:04:34 crc kubenswrapper[4718]: E1124 09:04:34.596412 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:04:34 crc kubenswrapper[4718]: I1124 09:04:34.610095 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-1_3e87139f-12e2-45d7-8401-ae56813c9829/mysql-bootstrap/0.log" Nov 24 09:04:34 crc kubenswrapper[4718]: I1124 09:04:34.812236 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-1_3e87139f-12e2-45d7-8401-ae56813c9829/galera/0.log" Nov 24 09:04:34 crc kubenswrapper[4718]: I1124 09:04:34.816241 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-1_3e87139f-12e2-45d7-8401-ae56813c9829/mysql-bootstrap/0.log" Nov 24 09:04:35 crc kubenswrapper[4718]: I1124 09:04:35.002730 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-2_2774aeea-619b-4e69-9927-95b17dcc9704/mysql-bootstrap/0.log" Nov 24 09:04:35 crc kubenswrapper[4718]: 
I1124 09:04:35.197152 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-2_2774aeea-619b-4e69-9927-95b17dcc9704/mysql-bootstrap/0.log" Nov 24 09:04:35 crc kubenswrapper[4718]: I1124 09:04:35.219798 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-2_2774aeea-619b-4e69-9927-95b17dcc9704/galera/0.log" Nov 24 09:04:35 crc kubenswrapper[4718]: I1124 09:04:35.381315 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_memcached-0_b90b6ef5-488d-4524-9c45-ac92728bfb71/memcached/0.log" Nov 24 09:04:35 crc kubenswrapper[4718]: I1124 09:04:35.403245 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstackclient_ee8580df-7780-4709-89f4-c1c8b7222187/openstackclient/0.log" Nov 24 09:04:35 crc kubenswrapper[4718]: I1124 09:04:35.600940 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_rabbitmq-server-0_71536c12-d65b-479d-b35f-43579c0c2e91/setup-container/0.log" Nov 24 09:04:35 crc kubenswrapper[4718]: I1124 09:04:35.796926 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_rabbitmq-server-0_71536c12-d65b-479d-b35f-43579c0c2e91/setup-container/0.log" Nov 24 09:04:35 crc kubenswrapper[4718]: I1124 09:04:35.833141 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_rabbitmq-server-0_71536c12-d65b-479d-b35f-43579c0c2e91/rabbitmq/0.log" Nov 24 09:04:35 crc kubenswrapper[4718]: I1124 09:04:35.883943 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-proxy-547856594f-nf4j4_62d23e96-0761-4def-909e-dd0027504b8e/proxy-httpd/0.log" Nov 24 09:04:35 crc kubenswrapper[4718]: I1124 09:04:35.977125 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-proxy-547856594f-nf4j4_62d23e96-0761-4def-909e-dd0027504b8e/proxy-server/0.log" Nov 24 09:04:35 crc kubenswrapper[4718]: I1124 09:04:35.991797 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-ring-rebalance-jmbxx_7682a690-8f1e-435b-b29d-5d6e8c60676b/swift-ring-rebalance/0.log" Nov 24 09:04:36 crc kubenswrapper[4718]: I1124 09:04:36.168326 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_883e6594-9051-4dae-85fc-5f7d8bf60bab/account-replicator/0.log" Nov 24 09:04:36 crc kubenswrapper[4718]: I1124 09:04:36.181224 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_883e6594-9051-4dae-85fc-5f7d8bf60bab/account-reaper/0.log" Nov 24 09:04:36 crc kubenswrapper[4718]: I1124 09:04:36.192175 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_883e6594-9051-4dae-85fc-5f7d8bf60bab/account-auditor/0.log" Nov 24 09:04:36 crc kubenswrapper[4718]: I1124 09:04:36.263751 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_883e6594-9051-4dae-85fc-5f7d8bf60bab/account-server/0.log" Nov 24 09:04:36 crc kubenswrapper[4718]: I1124 09:04:36.322212 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_883e6594-9051-4dae-85fc-5f7d8bf60bab/container-auditor/0.log" Nov 24 09:04:36 crc kubenswrapper[4718]: I1124 09:04:36.327064 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_883e6594-9051-4dae-85fc-5f7d8bf60bab/container-replicator/0.log" Nov 24 
09:04:36 crc kubenswrapper[4718]: I1124 09:04:36.357701 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_883e6594-9051-4dae-85fc-5f7d8bf60bab/container-server/0.log" Nov 24 09:04:36 crc kubenswrapper[4718]: I1124 09:04:36.386427 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_883e6594-9051-4dae-85fc-5f7d8bf60bab/container-updater/0.log" Nov 24 09:04:36 crc kubenswrapper[4718]: I1124 09:04:36.445229 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_883e6594-9051-4dae-85fc-5f7d8bf60bab/object-auditor/0.log" Nov 24 09:04:36 crc kubenswrapper[4718]: I1124 09:04:36.497826 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_883e6594-9051-4dae-85fc-5f7d8bf60bab/object-expirer/0.log" Nov 24 09:04:36 crc kubenswrapper[4718]: I1124 09:04:36.502625 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_883e6594-9051-4dae-85fc-5f7d8bf60bab/object-replicator/0.log" Nov 24 09:04:36 crc kubenswrapper[4718]: I1124 09:04:36.540131 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_883e6594-9051-4dae-85fc-5f7d8bf60bab/object-server/0.log" Nov 24 09:04:36 crc kubenswrapper[4718]: I1124 09:04:36.600541 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_883e6594-9051-4dae-85fc-5f7d8bf60bab/object-updater/0.log" Nov 24 09:04:36 crc kubenswrapper[4718]: I1124 09:04:36.653814 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_883e6594-9051-4dae-85fc-5f7d8bf60bab/rsync/0.log" Nov 24 09:04:36 crc kubenswrapper[4718]: I1124 09:04:36.681804 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_883e6594-9051-4dae-85fc-5f7d8bf60bab/swift-recon-cron/0.log" Nov 24 09:04:42 crc kubenswrapper[4718]: I1124 09:04:42.902065 4718 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fvqgs"] Nov 24 09:04:42 crc kubenswrapper[4718]: I1124 09:04:42.904023 4718 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fvqgs" Nov 24 09:04:42 crc kubenswrapper[4718]: I1124 09:04:42.927273 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fvqgs"] Nov 24 09:04:43 crc kubenswrapper[4718]: I1124 09:04:43.015234 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-catalog-content\") pod \"redhat-marketplace-fvqgs\" (UID: \"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2\") " pod="openshift-marketplace/redhat-marketplace-fvqgs" Nov 24 09:04:43 crc kubenswrapper[4718]: I1124 09:04:43.015320 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rmww\" (UniqueName: \"kubernetes.io/projected/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-kube-api-access-7rmww\") pod \"redhat-marketplace-fvqgs\" (UID: \"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2\") " pod="openshift-marketplace/redhat-marketplace-fvqgs" Nov 24 09:04:43 crc kubenswrapper[4718]: I1124 09:04:43.015352 4718 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-utilities\") pod \"redhat-marketplace-fvqgs\" (UID: \"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2\") " pod="openshift-marketplace/redhat-marketplace-fvqgs" Nov 24 09:04:43 crc kubenswrapper[4718]: I1124 09:04:43.116638 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rmww\" (UniqueName: \"kubernetes.io/projected/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-kube-api-access-7rmww\") pod \"redhat-marketplace-fvqgs\" (UID: \"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2\") " pod="openshift-marketplace/redhat-marketplace-fvqgs" Nov 24 09:04:43 crc kubenswrapper[4718]: I1124 09:04:43.116690 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-utilities\") pod \"redhat-marketplace-fvqgs\" (UID: \"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2\") " pod="openshift-marketplace/redhat-marketplace-fvqgs" Nov 24 09:04:43 crc kubenswrapper[4718]: I1124 09:04:43.116777 4718 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-catalog-content\") pod \"redhat-marketplace-fvqgs\" (UID: \"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2\") " pod="openshift-marketplace/redhat-marketplace-fvqgs" Nov 24 09:04:43 crc kubenswrapper[4718]: I1124 09:04:43.117326 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-catalog-content\") pod \"redhat-marketplace-fvqgs\" (UID: \"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2\") " pod="openshift-marketplace/redhat-marketplace-fvqgs" Nov 24 09:04:43 crc kubenswrapper[4718]: I1124 09:04:43.117386 4718 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-utilities\") pod \"redhat-marketplace-fvqgs\" (UID: \"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2\") " pod="openshift-marketplace/redhat-marketplace-fvqgs" Nov 24 09:04:43 crc kubenswrapper[4718]: I1124 09:04:43.144945 4718 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-7rmww\" (UniqueName: \"kubernetes.io/projected/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-kube-api-access-7rmww\") pod \"redhat-marketplace-fvqgs\" (UID: \"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2\") " pod="openshift-marketplace/redhat-marketplace-fvqgs" Nov 24 09:04:43 crc kubenswrapper[4718]: I1124 09:04:43.219728 4718 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fvqgs" Nov 24 09:04:43 crc kubenswrapper[4718]: I1124 09:04:43.669613 4718 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fvqgs"] Nov 24 09:04:43 crc kubenswrapper[4718]: I1124 09:04:43.914483 4718 generic.go:334] "Generic (PLEG): container finished" podID="ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2" containerID="e0f1508d760cbeef108e51322d29655435b77c24e2796c408a1a0042ae93273e" exitCode=0 Nov 24 09:04:43 crc kubenswrapper[4718]: I1124 09:04:43.914587 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fvqgs" event={"ID":"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2","Type":"ContainerDied","Data":"e0f1508d760cbeef108e51322d29655435b77c24e2796c408a1a0042ae93273e"} Nov 24 09:04:43 crc kubenswrapper[4718]: I1124 09:04:43.914863 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fvqgs" event={"ID":"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2","Type":"ContainerStarted","Data":"435bb33135842a936263b5b086a0f5799abf3b5ffe5f770f8ff1d31e9f48bc57"} Nov 24 09:04:43 crc kubenswrapper[4718]: I1124 09:04:43.916677 4718 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 09:04:44 crc kubenswrapper[4718]: I1124 09:04:44.923615 4718 generic.go:334] "Generic (PLEG): container finished" podID="ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2" containerID="441b61fff1a28aa832efbded9e177dc83450331944df84ac7750127285f888d9" exitCode=0 Nov 24 09:04:44 crc kubenswrapper[4718]: I1124 09:04:44.923711 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fvqgs" event={"ID":"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2","Type":"ContainerDied","Data":"441b61fff1a28aa832efbded9e177dc83450331944df84ac7750127285f888d9"} Nov 24 09:04:45 crc kubenswrapper[4718]: I1124 09:04:45.933202 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fvqgs" event={"ID":"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2","Type":"ContainerStarted","Data":"dc98f08beafd492918af841a0646a0b5bd7759757c97079eefb45ff7dda9c357"} Nov 24 09:04:45 crc kubenswrapper[4718]: I1124 09:04:45.958066 4718 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fvqgs" podStartSLOduration=2.461763676 podStartE2EDuration="3.958043751s" podCreationTimestamp="2025-11-24 09:04:42 +0000 UTC" firstStartedPulling="2025-11-24 09:04:43.916426291 +0000 UTC m=+1756.032717195" lastFinishedPulling="2025-11-24 09:04:45.412706356 +0000 UTC m=+1757.528997270" observedRunningTime="2025-11-24 09:04:45.950721651 +0000 UTC m=+1758.067012555" watchObservedRunningTime="2025-11-24 09:04:45.958043751 +0000 UTC m=+1758.074334665" Nov 24 09:04:47 crc kubenswrapper[4718]: I1124 09:04:47.597183 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:04:47 crc kubenswrapper[4718]: E1124 09:04:47.597726 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:04:48 crc kubenswrapper[4718]: I1124 09:04:48.838532 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5csg_b22c865e-0000-410a-a062-d994e40b806b/extract-utilities/0.log" Nov 24 09:04:49 crc kubenswrapper[4718]: I1124 09:04:49.051123 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5csg_b22c865e-0000-410a-a062-d994e40b806b/extract-content/0.log" Nov 24 09:04:49 crc kubenswrapper[4718]: I1124 09:04:49.079863 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5csg_b22c865e-0000-410a-a062-d994e40b806b/extract-utilities/0.log" Nov 24 09:04:49 crc kubenswrapper[4718]: I1124 09:04:49.085637 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5csg_b22c865e-0000-410a-a062-d994e40b806b/extract-content/0.log" Nov 24 09:04:49 crc kubenswrapper[4718]: I1124 09:04:49.332910 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5csg_b22c865e-0000-410a-a062-d994e40b806b/extract-content/0.log" Nov 24 09:04:49 crc kubenswrapper[4718]: I1124 09:04:49.433892 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5csg_b22c865e-0000-410a-a062-d994e40b806b/extract-utilities/0.log" Nov 24 09:04:49 crc kubenswrapper[4718]: I1124 09:04:49.574013 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bnkbp_2f630e0b-0dbb-4498-a3dc-9cd80a7bf225/extract-utilities/0.log" Nov 24 09:04:49 crc kubenswrapper[4718]: I1124 09:04:49.745344 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-c5csg_b22c865e-0000-410a-a062-d994e40b806b/registry-server/0.log" Nov 24 09:04:49 crc kubenswrapper[4718]: I1124 09:04:49.781464 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bnkbp_2f630e0b-0dbb-4498-a3dc-9cd80a7bf225/extract-utilities/0.log" Nov 24 09:04:49 crc kubenswrapper[4718]: I1124 09:04:49.876375 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bnkbp_2f630e0b-0dbb-4498-a3dc-9cd80a7bf225/extract-content/0.log" Nov 24 09:04:49 crc kubenswrapper[4718]: I1124 09:04:49.876658 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bnkbp_2f630e0b-0dbb-4498-a3dc-9cd80a7bf225/extract-content/0.log" Nov 24 09:04:50 crc kubenswrapper[4718]: I1124 09:04:50.044537 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bnkbp_2f630e0b-0dbb-4498-a3dc-9cd80a7bf225/extract-utilities/0.log" Nov 24 09:04:50 crc kubenswrapper[4718]: I1124 09:04:50.081375 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bnkbp_2f630e0b-0dbb-4498-a3dc-9cd80a7bf225/extract-content/0.log" Nov 24 09:04:50 crc kubenswrapper[4718]: I1124 09:04:50.397732 4718 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw_db65587c-3818-4123-ae2b-eb66c4cf3ddb/util/0.log" Nov 24 09:04:50 crc kubenswrapper[4718]: I1124 09:04:50.536037 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw_db65587c-3818-4123-ae2b-eb66c4cf3ddb/util/0.log" Nov 24 09:04:50 crc kubenswrapper[4718]: I1124 09:04:50.623813 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw_db65587c-3818-4123-ae2b-eb66c4cf3ddb/pull/0.log" Nov 24 09:04:50 crc kubenswrapper[4718]: I1124 09:04:50.656694 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bnkbp_2f630e0b-0dbb-4498-a3dc-9cd80a7bf225/registry-server/0.log" Nov 24 09:04:50 crc kubenswrapper[4718]: I1124 09:04:50.678462 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw_db65587c-3818-4123-ae2b-eb66c4cf3ddb/pull/0.log" Nov 24 09:04:50 crc kubenswrapper[4718]: I1124 09:04:50.844794 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw_db65587c-3818-4123-ae2b-eb66c4cf3ddb/pull/0.log" Nov 24 09:04:50 crc kubenswrapper[4718]: I1124 09:04:50.853774 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw_db65587c-3818-4123-ae2b-eb66c4cf3ddb/extract/0.log" Nov 24 09:04:50 crc kubenswrapper[4718]: I1124 09:04:50.874912 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6x8zrw_db65587c-3818-4123-ae2b-eb66c4cf3ddb/util/0.log" Nov 24 09:04:51 crc kubenswrapper[4718]: I1124 09:04:51.040034 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-fvqgs_ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2/extract-utilities/0.log" Nov 24 09:04:51 crc kubenswrapper[4718]: I1124 09:04:51.073779 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-zqgzd_5fb52afb-b455-44d0-ad14-36a8f2790af2/marketplace-operator/0.log" Nov 24 09:04:51 crc kubenswrapper[4718]: I1124 09:04:51.247062 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-fvqgs_ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2/extract-utilities/0.log" Nov 24 09:04:51 crc kubenswrapper[4718]: I1124 09:04:51.260386 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-fvqgs_ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2/extract-content/0.log" Nov 24 09:04:51 crc kubenswrapper[4718]: I1124 09:04:51.266579 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-fvqgs_ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2/extract-content/0.log" Nov 24 09:04:51 crc kubenswrapper[4718]: I1124 09:04:51.473882 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-fvqgs_ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2/registry-server/0.log" Nov 24 09:04:51 crc kubenswrapper[4718]: I1124 09:04:51.520769 4718 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-fvqgs_ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2/extract-content/0.log" Nov 24 09:04:51 crc kubenswrapper[4718]: I1124 09:04:51.526601 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-fvqgs_ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2/extract-utilities/0.log" Nov 24 09:04:51 crc kubenswrapper[4718]: I1124 09:04:51.669570 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gkskr_bcef040a-e828-443f-80c3-4a3956da53c2/extract-utilities/0.log" Nov 24 09:04:51 crc kubenswrapper[4718]: I1124 09:04:51.846673 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gkskr_bcef040a-e828-443f-80c3-4a3956da53c2/extract-utilities/0.log" Nov 24 09:04:51 crc kubenswrapper[4718]: I1124 09:04:51.887858 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gkskr_bcef040a-e828-443f-80c3-4a3956da53c2/extract-content/0.log" Nov 24 09:04:51 crc kubenswrapper[4718]: I1124 09:04:51.917919 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gkskr_bcef040a-e828-443f-80c3-4a3956da53c2/extract-content/0.log" Nov 24 09:04:52 crc kubenswrapper[4718]: I1124 09:04:52.049494 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gkskr_bcef040a-e828-443f-80c3-4a3956da53c2/extract-utilities/0.log" Nov 24 09:04:52 crc kubenswrapper[4718]: I1124 09:04:52.065234 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gkskr_bcef040a-e828-443f-80c3-4a3956da53c2/extract-content/0.log" Nov 24 09:04:52 crc kubenswrapper[4718]: I1124 09:04:52.208641 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gkskr_bcef040a-e828-443f-80c3-4a3956da53c2/registry-server/0.log" Nov 24 09:04:52 crc kubenswrapper[4718]: I1124 09:04:52.301760 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r7tnb_fa32947a-3fd8-4557-beb9-58c89792425a/extract-utilities/0.log" Nov 24 09:04:52 crc kubenswrapper[4718]: I1124 09:04:52.487609 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r7tnb_fa32947a-3fd8-4557-beb9-58c89792425a/extract-content/0.log" Nov 24 09:04:52 crc kubenswrapper[4718]: I1124 09:04:52.489429 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r7tnb_fa32947a-3fd8-4557-beb9-58c89792425a/extract-utilities/0.log" Nov 24 09:04:52 crc kubenswrapper[4718]: I1124 09:04:52.528454 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r7tnb_fa32947a-3fd8-4557-beb9-58c89792425a/extract-content/0.log" Nov 24 09:04:52 crc kubenswrapper[4718]: I1124 09:04:52.700597 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r7tnb_fa32947a-3fd8-4557-beb9-58c89792425a/extract-utilities/0.log" Nov 24 09:04:52 crc kubenswrapper[4718]: I1124 09:04:52.711189 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r7tnb_fa32947a-3fd8-4557-beb9-58c89792425a/extract-content/0.log" Nov 24 09:04:53 crc kubenswrapper[4718]: I1124 09:04:53.220476 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-fvqgs" Nov 24 09:04:53 crc kubenswrapper[4718]: I1124 09:04:53.220646 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fvqgs" Nov 24 09:04:53 crc kubenswrapper[4718]: I1124 09:04:53.221570 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r7tnb_fa32947a-3fd8-4557-beb9-58c89792425a/registry-server/0.log" Nov 24 09:04:53 crc kubenswrapper[4718]: I1124 09:04:53.268684 4718 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fvqgs" Nov 24 09:04:54 crc kubenswrapper[4718]: I1124 09:04:54.044054 4718 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fvqgs" Nov 24 09:04:56 crc kubenswrapper[4718]: I1124 09:04:56.860631 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fvqgs"] Nov 24 09:04:57 crc kubenswrapper[4718]: I1124 09:04:57.017806 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fvqgs" podUID="ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2" containerName="registry-server" containerID="cri-o://dc98f08beafd492918af841a0646a0b5bd7759757c97079eefb45ff7dda9c357" gracePeriod=2 Nov 24 09:04:57 crc kubenswrapper[4718]: I1124 09:04:57.423457 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fvqgs" Nov 24 09:04:57 crc kubenswrapper[4718]: I1124 09:04:57.555873 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-catalog-content\") pod \"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2\" (UID: \"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2\") " Nov 24 09:04:57 crc kubenswrapper[4718]: I1124 09:04:57.555924 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-utilities\") pod \"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2\" (UID: \"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2\") " Nov 24 09:04:57 crc kubenswrapper[4718]: I1124 09:04:57.556084 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rmww\" (UniqueName: \"kubernetes.io/projected/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-kube-api-access-7rmww\") pod \"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2\" (UID: \"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2\") " Nov 24 09:04:57 crc kubenswrapper[4718]: I1124 09:04:57.556943 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-utilities" (OuterVolumeSpecName: "utilities") pod "ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2" (UID: "ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 09:04:57 crc kubenswrapper[4718]: I1124 09:04:57.569839 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-kube-api-access-7rmww" (OuterVolumeSpecName: "kube-api-access-7rmww") pod "ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2" (UID: "ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2"). InnerVolumeSpecName "kube-api-access-7rmww". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 09:04:57 crc kubenswrapper[4718]: I1124 09:04:57.582173 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2" (UID: "ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 09:04:57 crc kubenswrapper[4718]: I1124 09:04:57.657936 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rmww\" (UniqueName: \"kubernetes.io/projected/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-kube-api-access-7rmww\") on node \"crc\" DevicePath \"\"" Nov 24 09:04:57 crc kubenswrapper[4718]: I1124 09:04:57.657987 4718 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 09:04:57 crc kubenswrapper[4718]: I1124 09:04:57.658001 4718 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 09:04:58 crc kubenswrapper[4718]: I1124 09:04:58.030378 4718 generic.go:334] "Generic (PLEG): container finished" podID="ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2" containerID="dc98f08beafd492918af841a0646a0b5bd7759757c97079eefb45ff7dda9c357" exitCode=0 Nov 24 09:04:58 crc kubenswrapper[4718]: I1124 09:04:58.030433 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fvqgs" event={"ID":"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2","Type":"ContainerDied","Data":"dc98f08beafd492918af841a0646a0b5bd7759757c97079eefb45ff7dda9c357"} Nov 24 09:04:58 crc kubenswrapper[4718]: I1124 09:04:58.030470 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fvqgs" event={"ID":"ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2","Type":"ContainerDied","Data":"435bb33135842a936263b5b086a0f5799abf3b5ffe5f770f8ff1d31e9f48bc57"} Nov 24 09:04:58 crc kubenswrapper[4718]: I1124 09:04:58.030466 4718 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fvqgs" Nov 24 09:04:58 crc kubenswrapper[4718]: I1124 09:04:58.030487 4718 scope.go:117] "RemoveContainer" containerID="dc98f08beafd492918af841a0646a0b5bd7759757c97079eefb45ff7dda9c357" Nov 24 09:04:58 crc kubenswrapper[4718]: I1124 09:04:58.050142 4718 scope.go:117] "RemoveContainer" containerID="441b61fff1a28aa832efbded9e177dc83450331944df84ac7750127285f888d9" Nov 24 09:04:58 crc kubenswrapper[4718]: I1124 09:04:58.060566 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fvqgs"] Nov 24 09:04:58 crc kubenswrapper[4718]: I1124 09:04:58.068177 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fvqgs"] Nov 24 09:04:58 crc kubenswrapper[4718]: I1124 09:04:58.089137 4718 scope.go:117] "RemoveContainer" containerID="e0f1508d760cbeef108e51322d29655435b77c24e2796c408a1a0042ae93273e" Nov 24 09:04:58 crc kubenswrapper[4718]: I1124 09:04:58.108806 4718 scope.go:117] "RemoveContainer" containerID="dc98f08beafd492918af841a0646a0b5bd7759757c97079eefb45ff7dda9c357" Nov 24 09:04:58 crc kubenswrapper[4718]: E1124 09:04:58.109176 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc98f08beafd492918af841a0646a0b5bd7759757c97079eefb45ff7dda9c357\": container with ID starting with dc98f08beafd492918af841a0646a0b5bd7759757c97079eefb45ff7dda9c357 not found: ID does not exist" containerID="dc98f08beafd492918af841a0646a0b5bd7759757c97079eefb45ff7dda9c357" Nov 24 09:04:58 crc kubenswrapper[4718]: I1124 09:04:58.109213 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc98f08beafd492918af841a0646a0b5bd7759757c97079eefb45ff7dda9c357"} err="failed to get container status \"dc98f08beafd492918af841a0646a0b5bd7759757c97079eefb45ff7dda9c357\": rpc error: code = NotFound desc = could not find container \"dc98f08beafd492918af841a0646a0b5bd7759757c97079eefb45ff7dda9c357\": container with ID starting with dc98f08beafd492918af841a0646a0b5bd7759757c97079eefb45ff7dda9c357 not found: ID does not exist" Nov 24 09:04:58 crc kubenswrapper[4718]: I1124 09:04:58.109238 4718 scope.go:117] "RemoveContainer" containerID="441b61fff1a28aa832efbded9e177dc83450331944df84ac7750127285f888d9" Nov 24 09:04:58 crc kubenswrapper[4718]: E1124 09:04:58.109536 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"441b61fff1a28aa832efbded9e177dc83450331944df84ac7750127285f888d9\": container with ID starting with 441b61fff1a28aa832efbded9e177dc83450331944df84ac7750127285f888d9 not found: ID does not exist" containerID="441b61fff1a28aa832efbded9e177dc83450331944df84ac7750127285f888d9" Nov 24 09:04:58 crc kubenswrapper[4718]: I1124 09:04:58.109577 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"441b61fff1a28aa832efbded9e177dc83450331944df84ac7750127285f888d9"} err="failed to get container status \"441b61fff1a28aa832efbded9e177dc83450331944df84ac7750127285f888d9\": rpc error: code = NotFound desc = could not find container \"441b61fff1a28aa832efbded9e177dc83450331944df84ac7750127285f888d9\": container with ID starting with 441b61fff1a28aa832efbded9e177dc83450331944df84ac7750127285f888d9 not found: ID does not exist" Nov 24 09:04:58 crc kubenswrapper[4718]: I1124 09:04:58.109602 4718 scope.go:117] "RemoveContainer" 
containerID="e0f1508d760cbeef108e51322d29655435b77c24e2796c408a1a0042ae93273e" Nov 24 09:04:58 crc kubenswrapper[4718]: E1124 09:04:58.109850 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0f1508d760cbeef108e51322d29655435b77c24e2796c408a1a0042ae93273e\": container with ID starting with e0f1508d760cbeef108e51322d29655435b77c24e2796c408a1a0042ae93273e not found: ID does not exist" containerID="e0f1508d760cbeef108e51322d29655435b77c24e2796c408a1a0042ae93273e" Nov 24 09:04:58 crc kubenswrapper[4718]: I1124 09:04:58.109885 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0f1508d760cbeef108e51322d29655435b77c24e2796c408a1a0042ae93273e"} err="failed to get container status \"e0f1508d760cbeef108e51322d29655435b77c24e2796c408a1a0042ae93273e\": rpc error: code = NotFound desc = could not find container \"e0f1508d760cbeef108e51322d29655435b77c24e2796c408a1a0042ae93273e\": container with ID starting with e0f1508d760cbeef108e51322d29655435b77c24e2796c408a1a0042ae93273e not found: ID does not exist" Nov 24 09:04:58 crc kubenswrapper[4718]: I1124 09:04:58.607000 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2" path="/var/lib/kubelet/pods/ca8c24f8-f5ef-448a-bd2f-d69e567ebfe2/volumes" Nov 24 09:05:02 crc kubenswrapper[4718]: I1124 09:05:02.596426 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:05:02 crc kubenswrapper[4718]: E1124 09:05:02.596938 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:05:07 crc kubenswrapper[4718]: I1124 09:05:07.046288 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-create-b6tpd"] Nov 24 09:05:07 crc kubenswrapper[4718]: I1124 09:05:07.051171 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-7b23-account-create-update-s5bmh"] Nov 24 09:05:07 crc kubenswrapper[4718]: I1124 09:05:07.056061 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-7b23-account-create-update-s5bmh"] Nov 24 09:05:07 crc kubenswrapper[4718]: I1124 09:05:07.061352 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-db-create-b6tpd"] Nov 24 09:05:08 crc kubenswrapper[4718]: I1124 09:05:08.607164 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="798e367e-b020-428b-a39d-36fd0f8a0082" path="/var/lib/kubelet/pods/798e367e-b020-428b-a39d-36fd0f8a0082/volumes" Nov 24 09:05:08 crc kubenswrapper[4718]: I1124 09:05:08.607776 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2bd0da2-6ed3-47c4-9984-3c48e00fddad" path="/var/lib/kubelet/pods/a2bd0da2-6ed3-47c4-9984-3c48e00fddad/volumes" Nov 24 09:05:14 crc kubenswrapper[4718]: I1124 09:05:14.022075 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-sync-xs54k"] Nov 24 09:05:14 crc kubenswrapper[4718]: I1124 09:05:14.027371 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["glance-kuttl-tests/glance-db-sync-xs54k"] Nov 24 09:05:14 crc kubenswrapper[4718]: I1124 09:05:14.606340 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94a221c9-1794-4a91-b03a-c42a9bdcfcb9" path="/var/lib/kubelet/pods/94a221c9-1794-4a91-b03a-c42a9bdcfcb9/volumes" Nov 24 09:05:17 crc kubenswrapper[4718]: I1124 09:05:17.596228 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:05:17 crc kubenswrapper[4718]: E1124 09:05:17.596775 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:05:30 crc kubenswrapper[4718]: I1124 09:05:30.036959 4718 scope.go:117] "RemoveContainer" containerID="b5406ddc0b50a9931338235e6278d32488abdfb5b667b841598eb1bcb866ee86" Nov 24 09:05:30 crc kubenswrapper[4718]: I1124 09:05:30.061765 4718 scope.go:117] "RemoveContainer" containerID="868519ed7bb67d46e1a25d8a297da18e6b2d10aa894885340958e83f48e16203" Nov 24 09:05:30 crc kubenswrapper[4718]: I1124 09:05:30.092084 4718 scope.go:117] "RemoveContainer" containerID="2753676eb625477baf9352aa9145b9a630e77d1cfbd2ad80d3b6949a2f7b8a1f" Nov 24 09:05:30 crc kubenswrapper[4718]: I1124 09:05:30.597412 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:05:30 crc kubenswrapper[4718]: E1124 09:05:30.598027 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:05:44 crc kubenswrapper[4718]: I1124 09:05:44.601293 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:05:44 crc kubenswrapper[4718]: E1124 09:05:44.602350 4718 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-575gl_openshift-machine-config-operator(89887d07-87db-4f4f-a6fa-3cd34e814131)\"" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" Nov 24 09:05:55 crc kubenswrapper[4718]: I1124 09:05:55.596284 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" Nov 24 09:05:56 crc kubenswrapper[4718]: I1124 09:05:56.480185 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerStarted","Data":"5390d265736ec6cb7fabb1c883add5bc234265c69a14bf8407e229de7e97a79f"} Nov 24 09:06:02 crc kubenswrapper[4718]: I1124 09:06:02.545387 4718 generic.go:334] "Generic (PLEG): container finished" podID="caa366f9-42ff-44f5-abab-e68bbef875d0" 
containerID="2178def47b5245cf8b3869a6187af70b956c5780892d67c4ec9fc60a3317bd17" exitCode=0 Nov 24 09:06:02 crc kubenswrapper[4718]: I1124 09:06:02.545477 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8mmnm/must-gather-m55fg" event={"ID":"caa366f9-42ff-44f5-abab-e68bbef875d0","Type":"ContainerDied","Data":"2178def47b5245cf8b3869a6187af70b956c5780892d67c4ec9fc60a3317bd17"} Nov 24 09:06:02 crc kubenswrapper[4718]: I1124 09:06:02.546488 4718 scope.go:117] "RemoveContainer" containerID="2178def47b5245cf8b3869a6187af70b956c5780892d67c4ec9fc60a3317bd17" Nov 24 09:06:03 crc kubenswrapper[4718]: I1124 09:06:03.493769 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8mmnm_must-gather-m55fg_caa366f9-42ff-44f5-abab-e68bbef875d0/gather/0.log" Nov 24 09:06:10 crc kubenswrapper[4718]: I1124 09:06:10.653791 4718 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-8mmnm/must-gather-m55fg"] Nov 24 09:06:10 crc kubenswrapper[4718]: I1124 09:06:10.654538 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-8mmnm/must-gather-m55fg" podUID="caa366f9-42ff-44f5-abab-e68bbef875d0" containerName="copy" containerID="cri-o://1f2e78b27358656a8ed32e7d3213bf2eaefa381ee6a85496aa8b2be57269dfd3" gracePeriod=2 Nov 24 09:06:10 crc kubenswrapper[4718]: I1124 09:06:10.664645 4718 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-8mmnm/must-gather-m55fg"] Nov 24 09:06:11 crc kubenswrapper[4718]: I1124 09:06:11.007438 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8mmnm_must-gather-m55fg_caa366f9-42ff-44f5-abab-e68bbef875d0/copy/0.log" Nov 24 09:06:11 crc kubenswrapper[4718]: I1124 09:06:11.008239 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8mmnm/must-gather-m55fg" Nov 24 09:06:11 crc kubenswrapper[4718]: I1124 09:06:11.115495 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p62tm\" (UniqueName: \"kubernetes.io/projected/caa366f9-42ff-44f5-abab-e68bbef875d0-kube-api-access-p62tm\") pod \"caa366f9-42ff-44f5-abab-e68bbef875d0\" (UID: \"caa366f9-42ff-44f5-abab-e68bbef875d0\") " Nov 24 09:06:11 crc kubenswrapper[4718]: I1124 09:06:11.115564 4718 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/caa366f9-42ff-44f5-abab-e68bbef875d0-must-gather-output\") pod \"caa366f9-42ff-44f5-abab-e68bbef875d0\" (UID: \"caa366f9-42ff-44f5-abab-e68bbef875d0\") " Nov 24 09:06:11 crc kubenswrapper[4718]: I1124 09:06:11.122222 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/caa366f9-42ff-44f5-abab-e68bbef875d0-kube-api-access-p62tm" (OuterVolumeSpecName: "kube-api-access-p62tm") pod "caa366f9-42ff-44f5-abab-e68bbef875d0" (UID: "caa366f9-42ff-44f5-abab-e68bbef875d0"). InnerVolumeSpecName "kube-api-access-p62tm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 09:06:11 crc kubenswrapper[4718]: I1124 09:06:11.202790 4718 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/caa366f9-42ff-44f5-abab-e68bbef875d0-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "caa366f9-42ff-44f5-abab-e68bbef875d0" (UID: "caa366f9-42ff-44f5-abab-e68bbef875d0"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 09:06:11 crc kubenswrapper[4718]: I1124 09:06:11.216984 4718 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p62tm\" (UniqueName: \"kubernetes.io/projected/caa366f9-42ff-44f5-abab-e68bbef875d0-kube-api-access-p62tm\") on node \"crc\" DevicePath \"\"" Nov 24 09:06:11 crc kubenswrapper[4718]: I1124 09:06:11.217023 4718 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/caa366f9-42ff-44f5-abab-e68bbef875d0-must-gather-output\") on node \"crc\" DevicePath \"\"" Nov 24 09:06:11 crc kubenswrapper[4718]: I1124 09:06:11.608305 4718 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8mmnm_must-gather-m55fg_caa366f9-42ff-44f5-abab-e68bbef875d0/copy/0.log" Nov 24 09:06:11 crc kubenswrapper[4718]: I1124 09:06:11.608756 4718 generic.go:334] "Generic (PLEG): container finished" podID="caa366f9-42ff-44f5-abab-e68bbef875d0" containerID="1f2e78b27358656a8ed32e7d3213bf2eaefa381ee6a85496aa8b2be57269dfd3" exitCode=143 Nov 24 09:06:11 crc kubenswrapper[4718]: I1124 09:06:11.608860 4718 scope.go:117] "RemoveContainer" containerID="1f2e78b27358656a8ed32e7d3213bf2eaefa381ee6a85496aa8b2be57269dfd3" Nov 24 09:06:11 crc kubenswrapper[4718]: I1124 09:06:11.608913 4718 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8mmnm/must-gather-m55fg" Nov 24 09:06:11 crc kubenswrapper[4718]: I1124 09:06:11.631350 4718 scope.go:117] "RemoveContainer" containerID="2178def47b5245cf8b3869a6187af70b956c5780892d67c4ec9fc60a3317bd17" Nov 24 09:06:11 crc kubenswrapper[4718]: I1124 09:06:11.679185 4718 scope.go:117] "RemoveContainer" containerID="1f2e78b27358656a8ed32e7d3213bf2eaefa381ee6a85496aa8b2be57269dfd3" Nov 24 09:06:11 crc kubenswrapper[4718]: E1124 09:06:11.679577 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f2e78b27358656a8ed32e7d3213bf2eaefa381ee6a85496aa8b2be57269dfd3\": container with ID starting with 1f2e78b27358656a8ed32e7d3213bf2eaefa381ee6a85496aa8b2be57269dfd3 not found: ID does not exist" containerID="1f2e78b27358656a8ed32e7d3213bf2eaefa381ee6a85496aa8b2be57269dfd3" Nov 24 09:06:11 crc kubenswrapper[4718]: I1124 09:06:11.679632 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f2e78b27358656a8ed32e7d3213bf2eaefa381ee6a85496aa8b2be57269dfd3"} err="failed to get container status \"1f2e78b27358656a8ed32e7d3213bf2eaefa381ee6a85496aa8b2be57269dfd3\": rpc error: code = NotFound desc = could not find container \"1f2e78b27358656a8ed32e7d3213bf2eaefa381ee6a85496aa8b2be57269dfd3\": container with ID starting with 1f2e78b27358656a8ed32e7d3213bf2eaefa381ee6a85496aa8b2be57269dfd3 not found: ID does not exist" Nov 24 09:06:11 crc kubenswrapper[4718]: I1124 09:06:11.679657 4718 scope.go:117] "RemoveContainer" containerID="2178def47b5245cf8b3869a6187af70b956c5780892d67c4ec9fc60a3317bd17" Nov 24 09:06:11 crc kubenswrapper[4718]: E1124 09:06:11.679871 4718 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2178def47b5245cf8b3869a6187af70b956c5780892d67c4ec9fc60a3317bd17\": container with ID starting with 2178def47b5245cf8b3869a6187af70b956c5780892d67c4ec9fc60a3317bd17 not found: ID does not exist" containerID="2178def47b5245cf8b3869a6187af70b956c5780892d67c4ec9fc60a3317bd17" Nov 24 09:06:11 crc 
kubenswrapper[4718]: I1124 09:06:11.679932 4718 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2178def47b5245cf8b3869a6187af70b956c5780892d67c4ec9fc60a3317bd17"} err="failed to get container status \"2178def47b5245cf8b3869a6187af70b956c5780892d67c4ec9fc60a3317bd17\": rpc error: code = NotFound desc = could not find container \"2178def47b5245cf8b3869a6187af70b956c5780892d67c4ec9fc60a3317bd17\": container with ID starting with 2178def47b5245cf8b3869a6187af70b956c5780892d67c4ec9fc60a3317bd17 not found: ID does not exist" Nov 24 09:06:12 crc kubenswrapper[4718]: I1124 09:06:12.606565 4718 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="caa366f9-42ff-44f5-abab-e68bbef875d0" path="/var/lib/kubelet/pods/caa366f9-42ff-44f5-abab-e68bbef875d0/volumes" Nov 24 09:08:22 crc kubenswrapper[4718]: I1124 09:08:22.045464 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 09:08:22 crc kubenswrapper[4718]: I1124 09:08:22.046097 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 09:08:52 crc kubenswrapper[4718]: I1124 09:08:52.045707 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 09:08:52 crc kubenswrapper[4718]: I1124 09:08:52.046339 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 09:09:22 crc kubenswrapper[4718]: I1124 09:09:22.045308 4718 patch_prober.go:28] interesting pod/machine-config-daemon-575gl container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 09:09:22 crc kubenswrapper[4718]: I1124 09:09:22.045864 4718 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 09:09:22 crc kubenswrapper[4718]: I1124 09:09:22.045905 4718 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-575gl" Nov 24 09:09:22 crc kubenswrapper[4718]: I1124 09:09:22.046455 4718 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5390d265736ec6cb7fabb1c883add5bc234265c69a14bf8407e229de7e97a79f"} 
pod="openshift-machine-config-operator/machine-config-daemon-575gl" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 09:09:22 crc kubenswrapper[4718]: I1124 09:09:22.046510 4718 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-575gl" podUID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerName="machine-config-daemon" containerID="cri-o://5390d265736ec6cb7fabb1c883add5bc234265c69a14bf8407e229de7e97a79f" gracePeriod=600 Nov 24 09:09:22 crc kubenswrapper[4718]: I1124 09:09:22.903455 4718 generic.go:334] "Generic (PLEG): container finished" podID="89887d07-87db-4f4f-a6fa-3cd34e814131" containerID="5390d265736ec6cb7fabb1c883add5bc234265c69a14bf8407e229de7e97a79f" exitCode=0 Nov 24 09:09:22 crc kubenswrapper[4718]: I1124 09:09:22.903501 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerDied","Data":"5390d265736ec6cb7fabb1c883add5bc234265c69a14bf8407e229de7e97a79f"} Nov 24 09:09:22 crc kubenswrapper[4718]: I1124 09:09:22.904044 4718 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-575gl" event={"ID":"89887d07-87db-4f4f-a6fa-3cd34e814131","Type":"ContainerStarted","Data":"eb52430e6d8d99cafdb44ef37f668397067e36bab4df6e098becf5b6d48cac66"} Nov 24 09:09:22 crc kubenswrapper[4718]: I1124 09:09:22.904070 4718 scope.go:117] "RemoveContainer" containerID="aebdc04055f99aa477f9a8a79a8d6f08fc7d09665a78aad7c366bb1c5cd8425f" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515111020316024433 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015111020317017351 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015111014012016467 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015111014012015437 5ustar corecore